style: auto fix.
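The changes below all add semicolons and re-wrap lines that exceed roughly 80 characters, which is consistent with a formatter-driven "auto fix". As a minimal sketch only — the commit does not say which tool or settings were used, so Prettier and these option values are assumptions — such a configuration could look like:

// prettier.config.js — hypothetical configuration; the actual tool and
// settings behind "auto fix" are not shown in this commit.
module.exports = {
  semi: true, // add semicolons, as seen throughout the diff
  printWidth: 80, // wrap long lines, e.g. the path.join(...) calls below
  singleQuote: true, // string literals stay single-quoted in the new code
};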
@@ -1,7 +1,7 @@
-import { escape } from '@/lib/utils/htmlEscaper'
+import { escape } from '@/lib/utils/htmlEscaper';
 
-import siteMetadata from '@/data/siteMetadata'
-import { PostFrontMatter } from 'types/PostFrontMatter'
+import siteMetadata from '@/data/siteMetadata';
+import { PostFrontMatter } from 'types/PostFrontMatter';
 
 const generateRssItem = (post: PostFrontMatter) => `
   <item>
@@ -13,7 +13,7 @@ const generateRssItem = (post: PostFrontMatter) => `
     <author>${siteMetadata.email} (${siteMetadata.author})</author>
     ${post.tags && post.tags.map((t) => `<category>${t}</category>`).join('')}
   </item>
-`
+`;
 
 const generateRss = (posts: PostFrontMatter[], page = 'feed.xml') => `
   <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
@@ -22,12 +22,16 @@ const generateRss = (posts: PostFrontMatter[], page = 'feed.xml') => `
       <link>${siteMetadata.siteUrl}/blog</link>
       <description>${escape(siteMetadata.description)}</description>
       <language>${siteMetadata.language}</language>
-      <managingEditor>${siteMetadata.email} (${siteMetadata.author})</managingEditor>
+      <managingEditor>${siteMetadata.email} (${
+        siteMetadata.author
+      })</managingEditor>
       <webMaster>${siteMetadata.email} (${siteMetadata.author})</webMaster>
       <lastBuildDate>${new Date(posts[0].date).toUTCString()}</lastBuildDate>
-      <atom:link href="${siteMetadata.siteUrl}/${page}" rel="self" type="application/rss+xml"/>
+      <atom:link href="${
+        siteMetadata.siteUrl
+      }/${page}" rel="self" type="application/rss+xml"/>
       ${posts.map(generateRssItem).join('')}
     </channel>
   </rss>
-`
-export default generateRss
+`;
+export default generateRss;

lib/mdx.ts
@@ -1,62 +1,78 @@
-import { bundleMDX } from 'mdx-bundler'
-import fs from 'fs'
-import matter from 'gray-matter'
-import path from 'path'
-import readingTime from 'reading-time'
-import getAllFilesRecursively from './utils/files'
-import { PostFrontMatter } from 'types/PostFrontMatter'
-import { AuthorFrontMatter } from 'types/AuthorFrontMatter'
-import { Toc } from 'types/Toc'
+import { bundleMDX } from 'mdx-bundler';
+import fs from 'fs';
+import matter from 'gray-matter';
+import path from 'path';
+import readingTime from 'reading-time';
+import getAllFilesRecursively from './utils/files';
+import { PostFrontMatter } from 'types/PostFrontMatter';
+import { AuthorFrontMatter } from 'types/AuthorFrontMatter';
+import { Toc } from 'types/Toc';
 // Remark packages
-import remarkGfm from 'remark-gfm'
-import remarkFootnotes from 'remark-footnotes'
-import remarkMath from 'remark-math'
-import remarkExtractFrontmatter from './remark-extract-frontmatter'
-import remarkCodeTitles from './remark-code-title'
-import remarkTocHeadings from './remark-toc-headings'
-import remarkImgToJsx from './remark-img-to-jsx'
+import remarkGfm from 'remark-gfm';
+import remarkFootnotes from 'remark-footnotes';
+import remarkMath from 'remark-math';
+import remarkExtractFrontmatter from './remark-extract-frontmatter';
+import remarkCodeTitles from './remark-code-title';
+import remarkTocHeadings from './remark-toc-headings';
+import remarkImgToJsx from './remark-img-to-jsx';
 // Rehype packages
-import rehypeSlug from 'rehype-slug'
-import rehypeAutolinkHeadings from 'rehype-autolink-headings'
-import rehypeKatex from 'rehype-katex'
-import rehypeCitation from 'rehype-citation'
-import rehypePrismPlus from 'rehype-prism-plus'
-import rehypePresetMinify from 'rehype-preset-minify'
+import rehypeSlug from 'rehype-slug';
+import rehypeAutolinkHeadings from 'rehype-autolink-headings';
+import rehypeKatex from 'rehype-katex';
+import rehypeCitation from 'rehype-citation';
+import rehypePrismPlus from 'rehype-prism-plus';
+import rehypePresetMinify from 'rehype-preset-minify';
 
-const root = process.cwd()
+const root = process.cwd();
 
 export function getFiles(type: 'blog' | 'authors') {
-  const prefixPaths = path.join(root, 'data', type)
-  const files = getAllFilesRecursively(prefixPaths)
+  const prefixPaths = path.join(root, 'data', type);
+  const files = getAllFilesRecursively(prefixPaths);
   // Only want to return blog/path and ignore root, replace is needed to work on Windows
-  return files.map((file) => file.slice(prefixPaths.length + 1).replace(/\\/g, '/'))
+  return files.map((file) =>
+    file.slice(prefixPaths.length + 1).replace(/\\/g, '/')
+  );
 }
 
 export function formatSlug(slug: string) {
-  return slug.replace(/\.(mdx|md)/, '')
+  return slug.replace(/\.(mdx|md)/, '');
 }
 
 export function dateSortDesc(a: string, b: string) {
-  if (a > b) return -1
-  if (a < b) return 1
-  return 0
+  if (a > b) return -1;
+  if (a < b) return 1;
+  return 0;
 }
 
-export async function getFileBySlug<T>(type: 'authors' | 'blog', slug: string | string[]) {
-  const mdxPath = path.join(root, 'data', type, `${slug}.mdx`)
-  const mdPath = path.join(root, 'data', type, `${slug}.md`)
+export async function getFileBySlug<T>(
+  type: 'authors' | 'blog',
+  slug: string | string[]
+) {
+  const mdxPath = path.join(root, 'data', type, `${slug}.mdx`);
+  const mdPath = path.join(root, 'data', type, `${slug}.md`);
   const source = fs.existsSync(mdxPath)
     ? fs.readFileSync(mdxPath, 'utf8')
-    : fs.readFileSync(mdPath, 'utf8')
+    : fs.readFileSync(mdPath, 'utf8');
 
   // https://github.com/kentcdodds/mdx-bundler#nextjs-esbuild-enoent
   if (process.platform === 'win32') {
-    process.env.ESBUILD_BINARY_PATH = path.join(root, 'node_modules', 'esbuild', 'esbuild.exe')
+    process.env.ESBUILD_BINARY_PATH = path.join(
+      root,
+      'node_modules',
+      'esbuild',
+      'esbuild.exe'
+    );
   } else {
-    process.env.ESBUILD_BINARY_PATH = path.join(root, 'node_modules', 'esbuild', 'bin', 'esbuild')
+    process.env.ESBUILD_BINARY_PATH = path.join(
+      root,
+      'node_modules',
+      'esbuild',
+      'bin',
+      'esbuild'
+    );
   }
 
-  const toc: Toc = []
+  const toc: Toc = [];
 
   const { code, frontmatter } = await bundleMDX({
     source,
@@ -75,7 +91,7 @@ export async function getFileBySlug<T>(type: 'authors' | 'blog', slug: string |
         [remarkFootnotes, { inlineNotes: true }],
         remarkMath,
         remarkImgToJsx,
-      ]
+      ];
       options.rehypePlugins = [
         ...(options.rehypePlugins ?? []),
         rehypeSlug,
@@ -84,17 +100,17 @@ export async function getFileBySlug<T>(type: 'authors' | 'blog', slug: string |
         [rehypeCitation, { path: path.join(root, 'data') }],
         [rehypePrismPlus, { ignoreMissing: true }],
         rehypePresetMinify,
-      ]
-      return options
+      ];
+      return options;
     },
     esbuildOptions: (options) => {
       options.loader = {
         ...options.loader,
         '.js': 'jsx',
-      }
-      return options
+      };
+      return options;
     },
-  })
+  });
 
   return {
     mdxSource: code,
@@ -106,34 +122,36 @@ export async function getFileBySlug<T>(type: 'authors' | 'blog', slug: string |
       ...frontmatter,
       date: frontmatter.date ? new Date(frontmatter.date).toISOString() : null,
     },
-  }
+  };
 }
 
 export async function getAllFilesFrontMatter(folder: 'blog') {
-  const prefixPaths = path.join(root, 'data', folder)
+  const prefixPaths = path.join(root, 'data', folder);
 
-  const files = getAllFilesRecursively(prefixPaths)
+  const files = getAllFilesRecursively(prefixPaths);
 
-  const allFrontMatter: PostFrontMatter[] = []
+  const allFrontMatter: PostFrontMatter[] = [];
 
   files.forEach((file: string) => {
     // Replace is needed to work on Windows
-    const fileName = file.slice(prefixPaths.length + 1).replace(/\\/g, '/')
+    const fileName = file.slice(prefixPaths.length + 1).replace(/\\/g, '/');
     // Remove Unexpected File
     if (path.extname(fileName) !== '.md' && path.extname(fileName) !== '.mdx') {
-      return
+      return;
     }
-    const source = fs.readFileSync(file, 'utf8')
-    const matterFile = matter(source)
-    const frontmatter = matterFile.data as AuthorFrontMatter | PostFrontMatter
+    const source = fs.readFileSync(file, 'utf8');
+    const matterFile = matter(source);
+    const frontmatter = matterFile.data as AuthorFrontMatter | PostFrontMatter;
     if ('draft' in frontmatter && frontmatter.draft !== true) {
      allFrontMatter.push({
        ...frontmatter,
        slug: formatSlug(fileName),
-        date: frontmatter.date ? new Date(frontmatter.date).toISOString() : null,
-      })
+        date: frontmatter.date
+          ? new Date(frontmatter.date).toISOString()
+          : null,
+      });
     }
-  })
+  });
 
-  return allFrontMatter.sort((a, b) => dateSortDesc(a.date, b.date))
+  return allFrontMatter.sort((a, b) => dateSortDesc(a.date, b.date));
 }

@@ -1,32 +1,38 @@
-import { visit, Parent } from 'unist-util-visit'
+import { visit, Parent } from 'unist-util-visit';
 
 export default function remarkCodeTitles() {
   return (tree: Parent & { lang?: string }) =>
-    visit(tree, 'code', (node: Parent & { lang?: string }, index, parent: Parent) => {
-      const nodeLang = node.lang || ''
-      let language = ''
-      let title = ''
+    visit(
+      tree,
+      'code',
+      (node: Parent & { lang?: string }, index, parent: Parent) => {
+        const nodeLang = node.lang || '';
+        let language = '';
+        let title = '';
 
-      if (nodeLang.includes(':')) {
-        language = nodeLang.slice(0, nodeLang.search(':'))
-        title = nodeLang.slice(nodeLang.search(':') + 1, nodeLang.length)
-      }
+        if (nodeLang.includes(':')) {
+          language = nodeLang.slice(0, nodeLang.search(':'));
+          title = nodeLang.slice(nodeLang.search(':') + 1, nodeLang.length);
+        }
 
-      if (!title) {
-        return
-      }
+        if (!title) {
+          return;
+        }
 
-      const className = 'remark-code-title'
+        const className = 'remark-code-title';
 
-      const titleNode = {
-        type: 'mdxJsxFlowElement',
-        name: 'div',
-        attributes: [{ type: 'mdxJsxAttribute', name: 'className', value: className }],
-        children: [{ type: 'text', value: title }],
-        data: { _xdmExplicitJsx: true },
-      }
+        const titleNode = {
+          type: 'mdxJsxFlowElement',
+          name: 'div',
+          attributes: [
+            { type: 'mdxJsxAttribute', name: 'className', value: className },
+          ],
+          children: [{ type: 'text', value: title }],
+          data: { _xdmExplicitJsx: true },
+        };
 
-      parent.children.splice(index, 0, titleNode)
-      node.lang = language
-    })
+        parent.children.splice(index, 0, titleNode);
+        node.lang = language;
+      }
+    );
 }

@@ -1,12 +1,12 @@
-import { VFile } from 'vfile'
-import { visit, Parent } from 'unist-util-visit'
-import { load } from 'js-yaml'
+import { VFile } from 'vfile';
+import { visit, Parent } from 'unist-util-visit';
+import { load } from 'js-yaml';
 
 export default function extractFrontmatter() {
   return (tree: Parent, file: VFile) => {
     visit(tree, 'yaml', (node: Parent) => {
       //@ts-ignore
-      file.data.frontmatter = load(node.value)
-    })
-  }
+      file.data.frontmatter = load(node.value);
+    });
+  };
 }

@@ -1,14 +1,14 @@
-import { Literal } from 'unist'
-import { visit, Parent, Node } from 'unist-util-visit'
-import sizeOf from 'image-size'
-import fs from 'fs'
+import { Literal } from 'unist';
+import { visit, Parent, Node } from 'unist-util-visit';
+import sizeOf from 'image-size';
+import fs from 'fs';
 
 type ImageNode = Parent & {
-  url: string
-  alt: string
-  name: string
-  attributes: (Literal & { name: string })[]
-}
+  url: string;
+  alt: string;
+  name: string;
+  attributes: (Literal & { name: string })[];
+};
 
 export default function remarkImgToJsx() {
   return (tree: Node) => {
@@ -16,29 +16,40 @@ export default function remarkImgToJsx() {
       tree,
       // only visit p tags that contain an img element
       (node: Parent): node is Parent =>
-        node.type === 'paragraph' && node.children.some((n) => n.type === 'image'),
+        node.type === 'paragraph' &&
+        node.children.some((n) => n.type === 'image'),
       (node: Parent) => {
-        const imageNode = node.children.find((n) => n.type === 'image') as ImageNode
+        const imageNode = node.children.find(
+          (n) => n.type === 'image'
+        ) as ImageNode;
 
         // only local files
         if (fs.existsSync(`${process.cwd()}/public${imageNode.url}`)) {
-          const dimensions = sizeOf(`${process.cwd()}/public${imageNode.url}`)
+          const dimensions = sizeOf(`${process.cwd()}/public${imageNode.url}`);
 
           // Convert original node to next/image
-          ;(imageNode.type = 'mdxJsxFlowElement'),
+          (imageNode.type = 'mdxJsxFlowElement'),
             (imageNode.name = 'Image'),
             (imageNode.attributes = [
              { type: 'mdxJsxAttribute', name: 'alt', value: imageNode.alt },
              { type: 'mdxJsxAttribute', name: 'src', value: imageNode.url },
-              { type: 'mdxJsxAttribute', name: 'width', value: dimensions.width },
-              { type: 'mdxJsxAttribute', name: 'height', value: dimensions.height },
-            ])
+              {
+                type: 'mdxJsxAttribute',
+                name: 'width',
+                value: dimensions.width,
+              },
+              {
+                type: 'mdxJsxAttribute',
+                name: 'height',
+                value: dimensions.height,
+              },
+            ]);
 
           // Change node type from p to div to avoid nesting error
-          node.type = 'div'
-          node.children = [imageNode]
+          node.type = 'div';
+          node.children = [imageNode];
         }
       }
-    )
-  }
+    );
+  };
 }

@@ -1,17 +1,17 @@
 //@ts-nocheck
-import { Parent } from 'unist'
-import { visit } from 'unist-util-visit'
-import { slug } from 'github-slugger'
-import { toString } from 'mdast-util-to-string'
+import { Parent } from 'unist';
+import { visit } from 'unist-util-visit';
+import { slug } from 'github-slugger';
+import { toString } from 'mdast-util-to-string';
 
 export default function remarkTocHeadings(options) {
   return (tree: Parent) =>
     visit(tree, 'heading', (node) => {
-      const textContent = toString(node)
+      const textContent = toString(node);
       options.exportRef.push({
        value: textContent,
        url: '#' + slug(textContent),
        depth: node.depth,
-      })
-    })
+      });
+    });
 }

lib/tags.ts
@@ -1,32 +1,32 @@
-import { PostFrontMatter } from 'types/PostFrontMatter'
-import fs from 'fs'
-import matter from 'gray-matter'
-import path from 'path'
-import { getFiles } from './mdx'
-import kebabCase from './utils/kebabCase'
+import { PostFrontMatter } from 'types/PostFrontMatter';
+import fs from 'fs';
+import matter from 'gray-matter';
+import path from 'path';
+import { getFiles } from './mdx';
+import kebabCase from './utils/kebabCase';
 
-const root = process.cwd()
+const root = process.cwd();
 
 export async function getAllTags(type: 'blog' | 'authors') {
-  const files = getFiles(type)
+  const files = getFiles(type);
 
-  const tagCount: Record<string, number> = {}
+  const tagCount: Record<string, number> = {};
   // Iterate through each post, putting all found tags into `tags`
   files.forEach((file) => {
-    const source = fs.readFileSync(path.join(root, 'data', type, file), 'utf8')
-    const matterFile = matter(source)
-    const data = matterFile.data as PostFrontMatter
+    const source = fs.readFileSync(path.join(root, 'data', type, file), 'utf8');
+    const matterFile = matter(source);
+    const data = matterFile.data as PostFrontMatter;
     if (data.tags && data.draft !== true) {
       data.tags.forEach((tag) => {
-        const formattedTag = kebabCase(tag)
+        const formattedTag = kebabCase(tag);
         if (formattedTag in tagCount) {
-          tagCount[formattedTag] += 1
+          tagCount[formattedTag] += 1;
         } else {
-          tagCount[formattedTag] = 1
+          tagCount[formattedTag] = 1;
         }
-      })
+      });
     }
-  })
+  });
 
-  return tagCount
+  return tagCount;
 }

@@ -1,23 +1,33 @@
-import fs from 'fs'
-import path from 'path'
+import fs from 'fs';
+import path from 'path';
 
 const pipe =
   (...fns) =>
   (x) =>
-    fns.reduce((v, f) => f(v), x)
+    fns.reduce((v, f) => f(v), x);
 
 const flattenArray = (input) =>
-  input.reduce((acc, item) => [...acc, ...(Array.isArray(item) ? item : [item])], [])
+  input.reduce(
+    (acc, item) => [...acc, ...(Array.isArray(item) ? item : [item])],
+    []
+  );
 
-const map = (fn) => (input) => input.map(fn)
+const map = (fn) => (input) => input.map(fn);
 
 const walkDir = (fullPath: string) => {
-  return fs.statSync(fullPath).isFile() ? fullPath : getAllFilesRecursively(fullPath)
-}
+  return fs.statSync(fullPath).isFile()
+    ? fullPath
+    : getAllFilesRecursively(fullPath);
+};
 
-const pathJoinPrefix = (prefix: string) => (extraPath: string) => path.join(prefix, extraPath)
+const pathJoinPrefix = (prefix: string) => (extraPath: string) =>
+  path.join(prefix, extraPath);
 
 const getAllFilesRecursively = (folder: string): string[] =>
-  pipe(fs.readdirSync, map(pipe(pathJoinPrefix(folder), walkDir)), flattenArray)(folder)
+  pipe(
+    fs.readdirSync,
+    map(pipe(pathJoinPrefix(folder), walkDir)),
+    flattenArray
+  )(folder);
 
-export default getAllFilesRecursively
+export default getAllFilesRecursively;

@@ -1,14 +1,14 @@
-import siteMetadata from '@/data/siteMetadata'
+import siteMetadata from '@/data/siteMetadata';
 
 const formatDate = (date: string) => {
   const options: Intl.DateTimeFormatOptions = {
    year: 'numeric',
    month: 'long',
    day: 'numeric',
-  }
-  const now = new Date(date).toLocaleDateString(siteMetadata.locale, options)
+  };
+  const now = new Date(date).toLocaleDateString(siteMetadata.locale, options);
 
-  return now
-}
+  return now;
+};
 
-export default formatDate
+export default formatDate;

@@ -1,7 +1,7 @@
-const { replace } = ''
+const { replace } = '';
 
 // escape
-const ca = /[&<>'"]/g
+const ca = /[&<>'"]/g;
 
 const esca = {
   '&': '&amp;',
@@ -9,8 +9,8 @@ const esca = {
   '>': '&gt;',
   "'": '&#39;',
   '"': '&quot;',
-}
-const pe = (m: keyof typeof esca) => esca[m]
+};
+const pe = (m: keyof typeof esca) => esca[m];
 
 /**
  * Safely escape HTML entities such as `&`, `<`, `>`, `"`, and `'`.
@@ -19,4 +19,4 @@ const pe = (m: keyof typeof esca) => esca[m]
  * the input type is unexpected, except for boolean and numbers,
  * converted as string.
  */
-export const escape = (es: string): string => replace.call(es, ca, pe)
+export const escape = (es: string): string => replace.call(es, ca, pe);

@@ -1,5 +1,5 @@
-import { slug } from 'github-slugger'
+import { slug } from 'github-slugger';
 
-const kebabCase = (str: string) => slug(str)
+const kebabCase = (str: string) => slug(str);
 
-export default kebabCase
+export default kebabCase;