refactor: switch to TypeScript. Closes #1.
@@ -1,8 +1,9 @@
 import { escape } from '@/lib/utils/htmlEscaper'

 import siteMetadata from '@/data/siteMetadata'
+import { PostFrontMatter } from 'types/PostFrontMatter'

-const generateRssItem = (post) => `
+const generateRssItem = (post: PostFrontMatter) => `
   <item>
     <guid>${siteMetadata.siteUrl}/blog/${post.slug}</guid>
     <title>${escape(post.title)}</title>
@@ -14,7 +15,7 @@ const generateRssItem = (post) => `
   </item>
 `

-const generateRss = (posts, page = 'feed.xml') => `
+const generateRss = (posts: PostFrontMatter[], page = 'feed.xml') => `
   <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
     <channel>
       <title>${escape(siteMetadata.title)}</title>
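The PostFrontMatter type referenced above lives in types/PostFrontMatter and is not part of this diff. A rough sketch of the shape these hunks rely on, with fields inferred only from the properties actually touched here (slug, title, date, tags, draft), so the real definition may carry more:

// Hypothetical sketch, NOT the actual types/PostFrontMatter.ts
interface PostFrontMatter {
  title: string
  date: string
  slug: string
  tags?: string[]
  draft?: boolean
}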
@@ -3,8 +3,10 @@ import fs from 'fs'
 import matter from 'gray-matter'
 import path from 'path'
 import readingTime from 'reading-time'
-import { visit } from 'unist-util-visit'
 import getAllFilesRecursively from './utils/files'
+import { PostFrontMatter } from 'types/PostFrontMatter'
+import { AuthorFrontMatter } from 'types/AuthorFrontMatter'
+import { Toc } from 'types/Toc'
 // Remark packages
 import remarkGfm from 'remark-gfm'
 import remarkFootnotes from 'remark-footnotes'
@@ -23,24 +25,24 @@ import rehypePresetMinify from 'rehype-preset-minify'

 const root = process.cwd()

-export function getFiles(type) {
+export function getFiles(type: 'blog' | 'authors') {
   const prefixPaths = path.join(root, 'data', type)
   const files = getAllFilesRecursively(prefixPaths)
   // Only want to return blog/path and ignore root, replace is needed to work on Windows
   return files.map((file) => file.slice(prefixPaths.length + 1).replace(/\\/g, '/'))
 }

-export function formatSlug(slug) {
+export function formatSlug(slug: string) {
   return slug.replace(/\.(mdx|md)/, '')
 }

-export function dateSortDesc(a, b) {
+export function dateSortDesc(a: string, b: string) {
   if (a > b) return -1
   if (a < b) return 1
   return 0
 }

-export async function getFileBySlug(type, slug) {
+export async function getFileBySlug<T>(type: 'authors' | 'blog', slug: string | string[]) {
   const mdxPath = path.join(root, 'data', type, `${slug}.mdx`)
   const mdPath = path.join(root, 'data', type, `${slug}.md`)
   const source = fs.existsSync(mdxPath)
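getFileBySlug now takes a type parameter T; its call sites are outside this diff, but presumably a caller pins the frontmatter type down roughly like this (usage sketch only, both slugs are made up for the example):

// Illustrative calls, not taken from the commit
const post = await getFileBySlug<PostFrontMatter>('blog', 'my-first-post')
const author = await getFileBySlug<AuthorFrontMatter>('authors', 'default')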
@@ -54,7 +56,7 @@ export async function getFileBySlug(type, slug) {
     process.env.ESBUILD_BINARY_PATH = path.join(root, 'node_modules', 'esbuild', 'bin', 'esbuild')
   }

-  let toc = []
+  const toc: Toc = []

   const { code, frontmatter } = await bundleMDX({
     source,
@@ -107,14 +109,14 @@ export async function getFileBySlug(type, slug) {
   }
 }

-export async function getAllFilesFrontMatter(folder) {
+export async function getAllFilesFrontMatter(folder: 'blog') {
   const prefixPaths = path.join(root, 'data', folder)

   const files = getAllFilesRecursively(prefixPaths)

-  const allFrontMatter = []
+  const allFrontMatter: PostFrontMatter[] = []

-  files.forEach((file) => {
+  files.forEach((file: string) => {
     // Replace is needed to work on Windows
     const fileName = file.slice(prefixPaths.length + 1).replace(/\\/g, '/')
     // Remove Unexpected File
@@ -122,8 +124,9 @@ export async function getAllFilesFrontMatter(folder) {
       return
     }
     const source = fs.readFileSync(file, 'utf8')
-    const { data: frontmatter } = matter(source)
-    if (frontmatter.draft !== true) {
+    const matterFile = matter(source)
+    const frontmatter = matterFile.data as AuthorFrontMatter | PostFrontMatter
+    if ('draft' in frontmatter && frontmatter.draft !== true) {
       allFrontMatter.push({
         ...frontmatter,
         slug: formatSlug(fileName),
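Since matterFile.data is now the union AuthorFrontMatter | PostFrontMatter, draft cannot be read until the union is narrowed; the 'draft' in frontmatter check does that narrowing. A minimal standalone illustration of the same pattern (both interfaces here are invented for the example):

interface AuthorFrontMatter {
  name: string
}
interface PostFrontMatter {
  title: string
  draft?: boolean
}

// The `in` check narrows the union: inside the true branch, fm is a PostFrontMatter.
function isPublishedPost(fm: AuthorFrontMatter | PostFrontMatter): boolean {
  return 'draft' in fm ? fm.draft !== true : false
}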
@@ -1,8 +1,9 @@
+import { Parent } from 'unist'
 import { visit } from 'unist-util-visit'

 export default function remarkCodeTitles() {
-  return (tree) =>
-    visit(tree, 'code', (node, index, parent) => {
+  return (tree: Parent & { lang?: string }) =>
+    visit(tree, 'code', (node: Parent & { lang?: string }, index, parent: Parent) => {
       const nodeLang = node.lang || ''
       let language = ''
       let title = ''
@@ -1,9 +1,12 @@
+import { Parent } from 'unist'
+import { VFile } from 'vfile'
 import { visit } from 'unist-util-visit'
 import { load } from 'js-yaml'

 export default function extractFrontmatter() {
-  return (tree, file) => {
-    visit(tree, 'yaml', (node, index, parent) => {
+  return (tree: Parent, file: VFile) => {
+    visit(tree, 'yaml', (node: Parent) => {
+      //@ts-ignore
       file.data.frontmatter = load(node.value)
     })
   }
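The added //@ts-ignore suppresses the error on the line below: a plain unist Parent declares no value field, and the vfile data bag is not declared to carry frontmatter. One possible cast-based alternative that avoids the ignore, sketched under the assumption of @types/unist Literal nodes (whether every cast is needed depends on the installed vfile typings, and this is not the commit's code):

import { Literal, Parent } from 'unist'
import { VFile } from 'vfile'
import { visit } from 'unist-util-visit'
import { load } from 'js-yaml'

// Hypothetical variant of the plugin above.
export default function extractFrontmatterTyped() {
  return (tree: Parent, file: VFile) => {
    visit(tree, 'yaml', (node) => {
      // The yaml node is a Literal whose value holds the raw frontmatter text.
      const { value } = node as unknown as Literal
      ;(file.data as { frontmatter?: unknown }).frontmatter = load(String(value))
    })
  }
}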
@@ -1,15 +1,24 @@
+import { Parent, Node, Literal } from 'unist'
 import { visit } from 'unist-util-visit'
 import sizeOf from 'image-size'
 import fs from 'fs'

+type ImageNode = Parent & {
+  url: string
+  alt: string
+  name: string
+  attributes: (Literal & { name: string })[]
+}
+
 export default function remarkImgToJsx() {
-  return (tree) => {
+  return (tree: Node) => {
     visit(
       tree,
       // only visit p tags that contain an img element
-      (node) => node.type === 'paragraph' && node.children.some((n) => n.type === 'image'),
-      (node) => {
-        const imageNode = node.children.find((n) => n.type === 'image')
+      (node: Parent): node is Parent =>
+        node.type === 'paragraph' && node.children.some((n) => n.type === 'image'),
+      (node: Parent) => {
+        const imageNode = node.children.find((n) => n.type === 'image') as ImageNode

         // only local files
         if (fs.existsSync(`${process.cwd()}/public${imageNode.url}`)) {
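The test callback now returns a type predicate (node is Parent), so the visitor body can treat matching nodes as parents with children. A stripped-down standalone example of the same TypeScript feature (names invented here):

import { Node, Parent } from 'unist'

// User-defined type guard: when it returns true, the caller sees node as Parent.
const isParagraphWithImage = (node: Node): node is Parent =>
  node.type === 'paragraph' &&
  'children' in node &&
  (node as Parent).children.some((n) => n.type === 'image')

declare const someNode: Node
if (isParagraphWithImage(someNode)) {
  someNode.children.length // narrowed to Parent, so children is available
}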
@@ -1,10 +1,12 @@
+//@ts-nocheck
+import { Parent } from 'unist'
 import { visit } from 'unist-util-visit'
 import { slug } from 'github-slugger'
 import { toString } from 'mdast-util-to-string'

 export default function remarkTocHeadings(options) {
-  return (tree) =>
-    visit(tree, 'heading', (node, index, parent) => {
+  return (tree: Parent) =>
+    visit(tree, 'heading', (node) => {
       const textContent = toString(node)
       options.exportRef.push({
         value: textContent,
@@ -1,3 +1,4 @@
+import { PostFrontMatter } from 'types/PostFrontMatter'
 import fs from 'fs'
 import matter from 'gray-matter'
 import path from 'path'
@@ -6,14 +7,15 @@ import kebabCase from './utils/kebabCase'

 const root = process.cwd()

-export async function getAllTags(type) {
-  const files = await getFiles(type)
+export async function getAllTags(type: 'blog' | 'authors') {
+  const files = getFiles(type)

-  let tagCount = {}
+  const tagCount: Record<string, number> = {}
   // Iterate through each post, putting all found tags into `tags`
   files.forEach((file) => {
     const source = fs.readFileSync(path.join(root, 'data', type, file), 'utf8')
-    const { data } = matter(source)
+    const matterFile = matter(source)
+    const data = matterFile.data as PostFrontMatter
     if (data.tags && data.draft !== true) {
       data.tags.forEach((tag) => {
         const formattedTag = kebabCase(tag)
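tagCount becomes a Record<string, number> instead of an untyped {} (indexing a bare {} with a string does not type-check under noImplicitAny). The counting itself sits just past the end of this hunk; with this type it would look roughly like the following sketch, not the commit's exact lines:

const tagCount: Record<string, number> = {}
const formattedTag = kebabCase('Tailwind CSS') // 'tailwind-css'
tagCount[formattedTag] = (tagCount[formattedTag] ?? 0) + 1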
@@ -11,13 +11,13 @@ const flattenArray = (input) =>

 const map = (fn) => (input) => input.map(fn)

-const walkDir = (fullPath) => {
+const walkDir = (fullPath: string) => {
   return fs.statSync(fullPath).isFile() ? fullPath : getAllFilesRecursively(fullPath)
 }

-const pathJoinPrefix = (prefix) => (extraPath) => path.join(prefix, extraPath)
+const pathJoinPrefix = (prefix: string) => (extraPath: string) => path.join(prefix, extraPath)

-const getAllFilesRecursively = (folder) =>
+const getAllFilesRecursively = (folder: string): string[] =>
   pipe(fs.readdirSync, map(pipe(pathJoinPrefix(folder), walkDir)), flattenArray)(folder)

 export default getAllFilesRecursively
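As typed, getAllFilesRecursively walks a directory tree and returns a flat string[] of full file paths; the mdx.ts hunks above consume it like this (usage sketch):

import path from 'path'
import getAllFilesRecursively from './utils/files'

// e.g. every markdown/MDX file under data/blog, as full paths
const blogFiles: string[] = getAllFilesRecursively(path.join(process.cwd(), 'data', 'blog'))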
@@ -1,7 +1,7 @@
 import siteMetadata from '@/data/siteMetadata'

-const formatDate = (date) => {
-  const options = {
+const formatDate = (date: string) => {
+  const options: Intl.DateTimeFormatOptions = {
     year: 'numeric',
     month: 'long',
     day: 'numeric',
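Intl.DateTimeFormatOptions is the built-in type for the options object accepted by toLocaleDateString and Intl.DateTimeFormat. The remainder of formatDate is outside this hunk, but an options object like this is typically consumed as follows (illustrative):

const options: Intl.DateTimeFormatOptions = {
  year: 'numeric',
  month: 'long',
  day: 'numeric',
}
// Month index 7 is August; constructing from parts avoids timezone surprises.
new Date(2021, 7, 7).toLocaleDateString('en-US', options) // "August 7, 2021"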
@@ -1,7 +1,6 @@
 const { replace } = ''

 // escape
-const es = /&(?:amp|#38|lt|#60|gt|#62|apos|#39|quot|#34);/g
 const ca = /[&<>'"]/g

 const esca = {
@@ -11,7 +10,7 @@ const esca = {
   "'": '&#39;',
   '"': '&quot;',
 }
-const pe = (m) => esca[m]
+const pe = (m: keyof typeof esca) => esca[m]

 /**
  * Safely escape HTML entities such as `&`, `<`, `>`, `"`, and `'`.
@@ -20,4 +19,4 @@ const pe = (m) => esca[m]
  * the input type is unexpected, except for boolean and numbers,
  * converted as string.
  */
-export const escape = (es) => replace.call(es, ca, pe)
+export const escape = (es: string): string => replace.call(es, ca, pe)
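keyof typeof esca is the union of the map's literal keys, so pe only accepts characters the map can actually translate. For instance:

const esca = {
  '&': '&amp;',
  '<': '&lt;',
  '>': '&gt;',
  "'": '&#39;',
  '"': '&quot;',
}
type EscapableChar = keyof typeof esca // '&' | '<' | '>' | "'" | '"'
const pe = (m: EscapableChar) => esca[m]
pe('&') // '&amp;'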
@@ -1,5 +1,5 @@
 import { slug } from 'github-slugger'

-const kebabCase = (str) => slug(str)
+const kebabCase = (str: string) => slug(str)

 export default kebabCase
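kebabCase simply delegates to github-slugger's slug, which lowercases and hyphenates, e.g.:

import kebabCase from './utils/kebabCase'

kebabCase('Next JS') // 'next-js'
kebabCase('Tailwind CSS') // 'tailwind-css'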