feat: Update the blog framework to v2. (#3)
Some checks failed: 🚀 Build and deploy by ftp / 🎉 Deploy (push) failed after 10m33s

Co-authored-by: Ivan Li <ivanli2048@gmail.com>
Reviewed-on: #3
2023-08-16 23:29:22 +08:00
parent 02ab7d11b2
commit de1da22508
143 changed files with 14372 additions and 12297 deletions


@@ -1,132 +0,0 @@
const fs = require('fs');
const path = require('path');
const inquirer = require('inquirer');
const dedent = require('dedent');
const root = process.cwd();
const getAuthors = () => {
const authorPath = path.join(root, 'data', 'authors');
const authorList = fs
.readdirSync(authorPath)
.map((filename) => path.parse(filename).name);
return authorList;
};
const getLayouts = () => {
const layoutPath = path.join(root, 'layouts');
const layoutList = fs
.readdirSync(layoutPath)
.map((filename) => path.parse(filename).name)
.filter((file) => file.toLowerCase().includes('post'));
return layoutList;
};
const genFrontMatter = (answers) => {
let d = new Date();
const date = [
d.getFullYear(),
('0' + (d.getMonth() + 1)).slice(-2),
('0' + d.getDate()).slice(-2),
].join('-');
const tagArray = answers.tags.split(',');
tagArray.forEach((tag, index) => (tagArray[index] = tag.trim()));
const tags = "'" + tagArray.join("','") + "'";
const authorArray =
answers.authors.length > 0 ? "'" + answers.authors.join("','") + "'" : '';
let frontMatter = dedent`---
title: ${answers.title ? answers.title : 'Untitled'}
date: '${date}'
tags: [${answers.tags ? tags : ''}]
draft: ${answers.draft === 'yes' ? true : false}
summary: ${answers.summary ? answers.summary : ' '}
images: []
layout: ${answers.layout}
canonicalUrl: ${answers.canonicalUrl}
`;
if (answers.authors.length > 0) {
frontMatter = frontMatter + '\n' + `authors: [${authorArray}]`;
}
frontMatter = frontMatter + '\n---';
return frontMatter;
};
inquirer
.prompt([
{
name: 'title',
message: 'Enter post title:',
type: 'input',
},
{
name: 'extension',
message: 'Choose post extension:',
type: 'list',
choices: ['mdx', 'md'],
},
{
name: 'authors',
message: 'Choose authors:',
type: 'checkbox',
choices: getAuthors,
},
{
name: 'summary',
message: 'Enter post summary:',
type: 'input',
},
{
name: 'draft',
message: 'Set post as draft?',
type: 'list',
choices: ['yes', 'no'],
},
{
name: 'tags',
message: 'Any Tags? Separate them with , or leave empty if no tags.',
type: 'input',
},
{
name: 'layout',
message: 'Select layout',
type: 'list',
choices: getLayouts,
},
{
name: 'canonicalUrl',
message: 'Enter canonical url:',
type: 'input',
},
])
.then((answers) => {
// Remove special characters and replace space with -
const fileName = answers.title
.toLowerCase()
.replace(/[^a-zA-Z0-9 ]/g, '')
.replace(/ /g, '-')
.replace(/-+/g, '-');
const frontMatter = genFrontMatter(answers);
if (!fs.existsSync('data/blog'))
fs.mkdirSync('data/blog', { recursive: true });
const filePath = `data/blog/${fileName ? fileName : 'untitled'}.${
answers.extension ? answers.extension : 'md'
}`;
fs.writeFile(filePath, frontMatter, { flag: 'wx' }, (err) => {
if (err) {
throw err;
} else {
console.log(`Blog post generated successfully at ${filePath}`);
}
});
})
.catch((error) => {
if (error.isTtyError) {
console.log("Prompt couldn't be rendered in the current environment");
} else {
console.log('Something went wrong, sorry!');
}
});
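For illustration, given hypothetical answers (title "Hello World", tags "next, js", draft "no", summary "My first post", layout "PostLayout", no authors, empty canonical URL), the script above would write data/blog/hello-world.md with front matter roughly like the following, where the date is the day the script runs:

---
title: Hello World
date: '2023-08-16'
tags: ['next','js']
draft: false
summary: My first post
images: []
layout: PostLayout
canonicalUrl:
---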


@@ -1,69 +0,0 @@
const fs = require('fs');
const globby = require('globby');
const matter = require('gray-matter');
const prettier = require('prettier');
const siteMetadata = require('../data/siteMetadata');
(async () => {
const prettierConfig = await prettier.resolveConfig('./.prettierrc.js');
const pages = await globby([
'pages/*.js',
'pages/*.tsx',
'data/blog/**/*.mdx',
'data/blog/**/*.md',
'public/tags/**/*.xml',
'!pages/_*.js',
'!pages/_*.tsx',
'!pages/api',
]);
const sitemap = `
<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
${pages
.map((page) => {
// Exclude drafts from the sitemap
if (page.search('.md') >= 1 && fs.existsSync(page)) {
const source = fs.readFileSync(page, 'utf8');
const fm = matter(source);
if (fm.data.draft) {
return;
}
if (fm.data.canonicalUrl) {
return;
}
}
const path = page
.replace('pages/', '/')
.replace('data/blog', '/blog')
.replace('public/', '/')
.replace('.js', '')
.replace('.tsx', '')
.replace('.mdx', '')
.replace('.md', '')
.replace('/feed.xml', '');
const route = path === '/index' ? '' : path;
if (
page.search('pages/404.') > -1 ||
page.search(`pages/blog/[...slug].`) > -1
) {
return;
}
return `
<url>
<loc>${siteMetadata.siteUrl}${route}</loc>
</url>
`;
})
.join('')}
</urlset>
`;
const formatted = prettier.format(sitemap, {
...prettierConfig,
parser: 'html',
});
// eslint-disable-next-line no-sync
fs.writeFileSync('public/sitemap.xml', formatted);
})();
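For illustration, with a hypothetical siteUrl of https://example.com and pages such as pages/about.js and data/blog/my-post.mdx, the public/sitemap.xml written above would look roughly like this before prettier reformats it:

<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
  <url>
    <loc>https://example.com/about</loc>
  </url>
  <url>
    <loc>https://example.com/blog/my-post</loc>
  </url>
</urlset>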


@@ -1,134 +0,0 @@
#!/usr/bin/env node
// Adapted from https://github.com/hashicorp/next-remote-watch
// A copy of next-remote-watch with an additional ws reload emitter.
// The app listens to the event and triggers a client-side router refresh
// see components/ClientReload.js
const chalk = require('chalk');
const chokidar = require('chokidar');
const program = require('commander');
const http = require('http');
const SocketIO = require('socket.io');
const express = require('express');
const spawn = require('child_process').spawn;
const next = require('next');
const path = require('path');
const { parse } = require('url');
const pkg = require('../package.json');
const defaultWatchEvent = 'change';
program.storeOptionsAsProperties().version(pkg.version);
program
.option('-r, --root [dir]', 'root directory of your nextjs app')
.option(
'-s, --script [path]',
'path to the script you want to trigger on a watcher event',
false
)
.option('-c, --command [cmd]', 'command to execute on a watcher event', false)
.option(
'-e, --event [name]',
`name of event to watch, defaults to ${defaultWatchEvent}`,
defaultWatchEvent
)
.option(
'-p, --polling [name]',
`use polling for the watcher, defaults to false`,
false
)
.parse(process.argv);
const shell = process.env.SHELL;
const app = next({ dev: true, dir: program.root || process.cwd() });
const port = parseInt(process.env.PORT, 10) || 3000;
const handle = app.getRequestHandler();
app.prepare().then(() => {
// if directories are provided, watch them for changes and trigger reload
if (program.args.length > 0) {
chokidar
.watch(program.args, { usePolling: Boolean(program.polling) })
.on(
program.event,
async (filePathContext, eventContext = defaultWatchEvent) => {
// Emit changes via socketio
io.sockets.emit('reload', filePathContext);
app.server.hotReloader.send('building');
if (program.command) {
// Use spawn here so that we can pipe stdio from the command without buffering
spawn(
shell,
[
'-c',
program.command
.replace(/\{event\}/gi, filePathContext)
.replace(/\{path\}/gi, eventContext),
],
{
stdio: 'inherit',
}
);
}
if (program.script) {
try {
// find the path of your --script script
const scriptPath = path.join(
process.cwd(),
program.script.toString()
);
// require your --script script
const executeFile = require(scriptPath);
// run the exported function from your --script script
executeFile(filePathContext, eventContext);
} catch (e) {
console.error('Remote script failed');
console.error(e);
return e;
}
}
app.server.hotReloader.send('reloadPage');
}
);
}
// create an express server
const expressApp = express();
const server = http.createServer(expressApp);
// watch files with socketIO
const io = SocketIO(server);
// special handling for mdx reload route
const reloadRoute = express.Router();
reloadRoute.use(express.json());
reloadRoute.all('/', (req, res) => {
// log message if present
const msg = req.body.message;
const color = req.body.color;
msg && console.log(color ? chalk[color](msg) : msg);
// reload the nextjs app
app.server.hotReloader.send('building');
app.server.hotReloader.send('reloadPage');
res.end('Reload initiated');
});
expressApp.use('/__next_reload', reloadRoute);
// handle all other routes with next.js
expressApp.all('*', (req, res) => handle(req, res, parse(req.url, true)));
// fire it up
server.listen(port, (err) => {
if (err) throw err;
console.log(`> Ready on http://localhost:${port}`);
});
});
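For context, the 'reload' event emitted above is consumed on the client by components/ClientReload.js, as the header comment notes. That component is not part of this diff; a minimal sketch of such a listener, assuming socket.io-client and the Next.js router, might look like:

// Hypothetical sketch, not the project's actual components/ClientReload.js
import { useEffect } from 'react';
import { useRouter } from 'next/router';
import io from 'socket.io-client';

export const ClientReload = () => {
  const router = useRouter();
  useEffect(() => {
    // Connect back to the dev server above and re-render the current route on 'reload'
    const socket = io();
    socket.on('reload', () => router.replace(router.asPath, undefined, { scroll: false }));
    return () => {
      socket.disconnect();
    };
  }, [router]);
  return null;
};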

scripts/postbuild.mjs (new file, 7 lines)

@@ -0,0 +1,7 @@
import rss from './rss.mjs'

async function postbuild() {
  await rss()
}

postbuild()
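This script is presumably run through npm's postbuild lifecycle hook after `next build`; the package.json change is not shown in this diff, so the wiring below is only a hypothetical setup:

"scripts": {
  "build": "next build",
  "postbuild": "node ./scripts/postbuild.mjs"
}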

scripts/rss.mjs (new file, 62 lines)

@@ -0,0 +1,62 @@
import { writeFileSync, mkdirSync } from 'fs'
import path from 'path'
import GithubSlugger from 'github-slugger'
import { escape } from 'pliny/utils/htmlEscaper.js'
import siteMetadata from '../data/siteMetadata.js'
import tagData from '../app/tag-data.json' assert { type: 'json' }
import { allBlogs } from '../.contentlayer/generated/index.mjs'

const generateRssItem = (config, post) => `
  <item>
    <guid>${config.siteUrl}/blog/${post.slug}</guid>
    <title>${escape(post.title)}</title>
    <link>${config.siteUrl}/blog/${post.slug}</link>
    ${post.summary && `<description>${escape(post.summary)}</description>`}
    <pubDate>${new Date(post.date).toUTCString()}</pubDate>
    <author>${config.email} (${config.author})</author>
    ${post.tags && post.tags.map((t) => `<category>${t}</category>`).join('')}
  </item>
`

const generateRss = (config, posts, page = 'feed.xml') => `
  <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
    <channel>
      <title>${escape(config.title)}</title>
      <link>${config.siteUrl}/blog</link>
      <description>${escape(config.description)}</description>
      <language>${config.language}</language>
      <managingEditor>${config.email} (${config.author})</managingEditor>
      <webMaster>${config.email} (${config.author})</webMaster>
      <lastBuildDate>${new Date(posts[0].date).toUTCString()}</lastBuildDate>
      <atom:link href="${config.siteUrl}/${page}" rel="self" type="application/rss+xml"/>
      ${posts.map((post) => generateRssItem(config, post)).join('')}
    </channel>
  </rss>
`

async function generateRSS(config, allBlogs, page = 'feed.xml') {
  const publishPosts = allBlogs.filter((post) => post.draft !== true)
  // RSS for blog post
  if (publishPosts.length > 0) {
    const rss = generateRss(config, publishPosts)
    writeFileSync(`./public/${page}`, rss)
  }
  if (publishPosts.length > 0) {
    for (const tag of Object.keys(tagData)) {
      const filteredPosts = allBlogs.filter((post) =>
        post.tags.map((t) => GithubSlugger.slug(t)).includes(tag)
      )
      const rss = generateRss(config, filteredPosts, `tags/${tag}/${page}`)
      const rssPath = path.join('public', 'tags', tag)
      mkdirSync(rssPath, { recursive: true })
      writeFileSync(path.join(rssPath, page), rss)
    }
  }
}

const rss = () => {
  generateRSS(siteMetadata, allBlogs)
  console.log('RSS feed generated...')
}

export default rss
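The site-wide feed is written to public/feed.xml and each tag's feed to public/tags/<tag>/feed.xml. For illustration, with a hypothetical config (siteUrl https://example.com, email me@example.com, author Ivan) and a post with slug hello-world, summary "My first post", tag "next", dated 2023-08-16, each generated item would look roughly like:

<item>
  <guid>https://example.com/blog/hello-world</guid>
  <title>Hello World</title>
  <link>https://example.com/blog/hello-world</link>
  <description>My first post</description>
  <pubDate>Wed, 16 Aug 2023 00:00:00 GMT</pubDate>
  <author>me@example.com (Ivan)</author>
  <category>next</category>
</item>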