chore(mdx-loader): migrate package to TypeScript (#5347)

* Polish code style

Signed-off-by: Josh-Cena <sidachen2003@gmail.com>

* Partly done migration

Signed-off-by: Josh-Cena <sidachen2003@gmail.com>

* Complete typing

Signed-off-by: Josh-Cena <sidachen2003@gmail.com>

* Fix tests

Signed-off-by: Josh-Cena <sidachen2003@gmail.com>

* A-ha

Signed-off-by: Josh-Cena <sidachen2003@gmail.com>

* Cleanup

Signed-off-by: Josh-Cena <sidachen2003@gmail.com>

* Fix error

Signed-off-by: Josh-Cena <sidachen2003@gmail.com>

* Cleanup

Signed-off-by: Josh-Cena <sidachen2003@gmail.com>
Joshua Chen 2021-08-12 20:55:14 +08:00 committed by GitHub
parent ac4a253cdf
commit 3fc47938a5
27 changed files with 345 additions and 287 deletions

@@ -46,7 +46,7 @@ module.exports = {
    // Ignore certain webpack alias because it can't be resolved
    'import/no-unresolved': [
      ERROR,
-     {ignore: ['^@theme', '^@docusaurus', '^@generated']},
+     {ignore: ['^@theme', '^@docusaurus', '^@generated', 'unist', 'mdast']},
    ],
    'import/extensions': OFF,
    'header/header': [

@@ -2,8 +2,8 @@
  "name": "@docusaurus/mdx-loader",
  "version": "2.0.0-beta.4",
  "description": "Docusaurus Loader for MDX",
- "main": "src/index.js",
- "types": "src/index.d.ts",
+ "main": "lib/index.js",
+ "types": "src/types.d.ts",
  "publishConfig": {
    "access": "public"
  },
@@ -39,6 +39,10 @@
  },
  "devDependencies": {
    "@docusaurus/types": "2.0.0-beta.4",
+   "@types/escape-html": "^1.0.1",
+   "@types/mdast": "^3.0.7",
+   "@types/stringify-object": "^3.3.1",
+   "@types/unist": "^2.0.6",
    "remark": "^12.0.0",
    "remark-mdx": "^1.6.21",
    "to-vfile": "^6.0.0",

@@ -1,19 +0,0 @@
-/**
- * Copyright (c) Facebook, Inc. and its affiliates.
- *
- * This source code is licensed under the MIT license found in the
- * LICENSE file in the root directory of this source tree.
- */
-
-type RemarkOrRehypePlugin =
-  // eslint-disable-next-line @typescript-eslint/ban-types
-  [Function, Record<string, unknown>] | Function;
-
-declare function docusaurusMdxLoader(fileString: string): string;
-
-export interface RemarkAndRehypePluginOptions {
-  remarkPlugins: RemarkOrRehypePlugin[];
-  rehypePlugins: string[];
-  beforeDefaultRemarkPlugins: RemarkOrRehypePlugin[];
-  beforeDefaultRehypePlugins: RemarkOrRehypePlugin[];
-}

@@ -5,36 +5,45 @@
 * LICENSE file in the root directory of this source tree.
 */

-const {readFile} = require('fs-extra');
-const mdx = require('@mdx-js/mdx');
-const chalk = require('chalk');
-const emoji = require('remark-emoji');
-const {
+import {readFile} from 'fs-extra';
+import mdx from '@mdx-js/mdx';
+import chalk from 'chalk';
+import emoji from 'remark-emoji';
+import {
  parseFrontMatter,
  parseMarkdownContentTitle,
-} = require('@docusaurus/utils');
-const stringifyObject = require('stringify-object');
-const headings = require('./remark/headings');
-const toc = require('./remark/toc');
-const unwrapMdxCodeBlocks = require('./remark/unwrapMdxCodeBlocks');
-const transformImage = require('./remark/transformImage');
-const transformLinks = require('./remark/transformLinks');
-const {escapePath} = require('@docusaurus/utils');
-const {getFileLoaderUtils} = require('@docusaurus/core/lib/webpack/utils');
+  escapePath,
+} from '@docusaurus/utils';
+import stringifyObject from 'stringify-object';
+import headings from './remark/headings';
+import toc from './remark/toc';
+import unwrapMdxCodeBlocks from './remark/unwrapMdxCodeBlocks';
+import transformImage from './remark/transformImage';
+import transformLinks from './remark/transformLinks';
+import {getFileLoaderUtils} from '@docusaurus/core/lib/webpack/utils';
+import type {RemarkAndRehypePluginOptions} from '@docusaurus/mdx-loader';
+
+// TODO temporary until Webpack5 export this type
+// see https://github.com/webpack/webpack/issues/11630
+interface Loader extends Function {
+  (this: any, source: string): Promise<string | Buffer | void | undefined>;
+}

const {
  loaders: {inlineMarkdownImageFileLoader},
} = getFileLoaderUtils();

-const DEFAULT_OPTIONS = {
+const DEFAULT_OPTIONS: RemarkAndRehypePluginOptions = {
  rehypePlugins: [],
  remarkPlugins: [unwrapMdxCodeBlocks, emoji, headings, toc],
+  beforeDefaultRemarkPlugins: [],
+  beforeDefaultRehypePlugins: [],
};

// When this throws, it generally means that there's no metadata file associated with this MDX document
// It can happen when using MDX partials (usually starting with _)
// That's why it's important to provide the "isMDXPartial" function in config
-async function readMetadataPath(metadataPath) {
+async function readMetadataPath(metadataPath: string) {
  try {
    return await readFile(metadataPath, 'utf8');
  } catch (e) {
@@ -48,15 +57,14 @@ async function readMetadataPath(metadataPath) {
// We don't do that for all frontMatters, only for the configured keys
// {image: "./myImage.png"} => {image: require("./myImage.png")}
function createFrontMatterAssetsExportCode(
-  filePath,
-  frontMatter,
-  frontMatterAssetKeys = [],
+  frontMatter: Record<string, unknown>,
+  frontMatterAssetKeys: string[] = [],
) {
  if (frontMatterAssetKeys.length === 0) {
    return 'undefined';
  }

-  function createFrontMatterAssetRequireCode(value) {
+  function createFrontMatterAssetRequireCode(value: unknown) {
    // Only process string values starting with ./
    // We could enhance this logic and check if file exists on disc?
    if (typeof value === 'string' && value.startsWith('./')) {
@@ -84,7 +92,7 @@ function createFrontMatterAssetsExportCode(
  return exportValue;
}

-module.exports = async function docusaurusMdxLoader(fileString) {
+const docusaurusMdxLoader: Loader = async function (fileString) {
  const callback = this.async();
  const filePath = this.resourcePath;
  const reqOptions = this.getOptions() || {};
@@ -122,35 +130,25 @@ module.exports = async function docusaurusMdxLoader(fileString) {
    return callback(err);
  }

-  let exportStr = ``;
-  exportStr += `\nexport const frontMatter = ${stringifyObject(frontMatter)};`;
-  exportStr += `\nexport const frontMatterAssets = ${createFrontMatterAssetsExportCode(
-    filePath,
-    frontMatter,
-    reqOptions.frontMatterAssetKeys,
-  )};`;
-  exportStr += `\nexport const contentTitle = ${stringifyObject(
-    contentTitle,
-  )};`;
+  let exportStr = `
+export const frontMatter = ${stringifyObject(frontMatter)};
+export const frontMatterAssets = ${createFrontMatterAssetsExportCode(
+    frontMatter,
+    reqOptions.frontMatterAssetKeys,
+  )};
+export const contentTitle = ${stringifyObject(contentTitle)};`;

  // MDX partials are MDX files starting with _ or in a folder starting with _
  // Partial are not expected to have an associated metadata file or frontmatter
-  const isMDXPartial = options.isMDXPartial
-    ? options.isMDXPartial(filePath)
-    : false;
+  const isMDXPartial = options.isMDXPartial && options.isMDXPartial(filePath);

  if (isMDXPartial && hasFrontMatter) {
    const errorMessage = `Docusaurus MDX partial files should not contain FrontMatter.
Those partial files use the _ prefix as a convention by default, but this is configurable.
-File at ${filePath} contains FrontMatter that will be ignored: \n${JSON.stringify(
-      frontMatter,
-      null,
-      2,
-    )}`;
+File at ${filePath} contains FrontMatter that will be ignored:
+${JSON.stringify(frontMatter, null, 2)}`;

-    if (options.isMDXPartialFrontMatterWarningDisabled === true) {
-      // no warning
-    } else {
+    if (!options.isMDXPartialFrontMatterWarningDisabled) {
      const shouldError = process.env.NODE_ENV === 'test' || process.env.CI;
      if (shouldError) {
        return callback(new Error(errorMessage));
@@ -185,3 +183,5 @@ File at ${filePath} contains FrontMatter that will be ignored: \n${JSON.stringif
  return callback(null, code);
};
+
+export default docusaurusMdxLoader;
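For reference, the export preamble that `exportStr` now builds in a single template literal expands to roughly the following module prelude. This is only a sketch: the front matter reuses the `./myImage.png` example from the code comment above, and `contentTitle` is illustrative.

```ts
// Rough shape of the generated prelude for an MDX file whose front matter is
// {image: './myImage.png'} (the asset-key example from the comment above)
// and whose first heading is "Hello" — values are illustrative:
export const frontMatter = {image: './myImage.png'};
export const frontMatterAssets = {image: require('./myImage.png')};
export const contentTitle = 'Hello';
```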

@@ -7,18 +7,25 @@
/* Based on remark-slug (https://github.com/remarkjs/remark-slug) and gatsby-remark-autolink-headers (https://github.com/gatsbyjs/gatsby/blob/master/packages/gatsby-remark-autolink-headers) */

-const {parseMarkdownHeadingId} = require('@docusaurus/utils');
-const visit = require('unist-util-visit');
-const toString = require('mdast-util-to-string');
-const slugs = require('github-slugger')();
+import {parseMarkdownHeadingId} from '@docusaurus/utils';
+import visit, {Visitor} from 'unist-util-visit';
+import toString from 'mdast-util-to-string';
+import Slugger from 'github-slugger';
+import type {Transformer} from 'unified';
+import type {Parent} from 'unist';
+import type {Heading, Text} from 'mdast';

-function headings() {
-  const transformer = (ast) => {
+const slugs = new Slugger();
+
+function headings(): Transformer {
+  const transformer: Transformer = (ast) => {
    slugs.reset();

-    function visitor(headingNode) {
-      const data = headingNode.data || (headingNode.data = {}); // eslint-disable-line
-      const properties = data.hProperties || (data.hProperties = {});
+    const visitor: Visitor<Heading> = (headingNode) => {
+      const data = headingNode.data || (headingNode.data = {});
+      const properties = (data.hProperties || (data.hProperties = {})) as {
+        id: string;
+      };

      let {id} = properties;

      if (id) {
@@ -29,7 +36,7 @@ function headings() {
      );
      const heading = toString(
        headingTextNodes.length > 0
-          ? {children: headingTextNodes}
+          ? ({children: headingTextNodes} as Parent)
          : headingNode,
      );
@@ -42,8 +49,9 @@ function headings() {
        // When there's an id, it is always in the last child node
        // Sometimes heading is in multiple "parts" (** syntax creates a child node):
        // ## part1 *part2* part3 {#id}
-        const lastNode =
-          headingNode.children[headingNode.children.length - 1];
+        const lastNode = headingNode.children[
+          headingNode.children.length - 1
+        ] as Text;

        if (headingNode.children.length > 1) {
          const lastNodeText = parseMarkdownHeadingId(lastNode.value).text;
@@ -63,7 +71,7 @@ function headings() {
      data.id = id;
      properties.id = id;
-    }
+    };

    visit(ast, 'heading', visitor);
  };
@@ -71,4 +79,4 @@ function headings() {
  return transformer;
}

-module.exports = headings;
+export default headings;
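The slug generation and de-duplication above come straight from `github-slugger`; a minimal sketch of that behaviour (heading text is illustrative), which also shows why the module-level slugger is `reset()` once per document:

```ts
import Slugger from 'github-slugger';

const slugger = new Slugger();

// Duplicate headings get a numeric suffix, so the instance must be reset
// at the start of each document rather than once per heading.
slugger.slug('Getting Started'); // 'getting-started'
slugger.slug('Getting Started'); // 'getting-started-1'

// Headings that carry an explicit {#custom-id} are picked up by
// parseMarkdownHeadingId in the plugin above and bypass the slugger entirely.
slugger.reset();
```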

@@ -5,34 +5,42 @@
 * LICENSE file in the root directory of this source tree.
 */

-const {parse} = require('@babel/parser');
-const traverse = require('@babel/traverse').default;
-const stringifyObject = require('stringify-object');
-const search = require('./search');
+import {parse, ParserOptions} from '@babel/parser';
+import type {Identifier} from '@babel/types';
+import traverse from '@babel/traverse';
+import stringifyObject from 'stringify-object';
+import search from './search';
+import type {Plugin, Transformer} from 'unified';
+import type {Node, Parent} from 'unist';
+import type {Literal} from 'mdast';

-const parseOptions = {
+const parseOptions: ParserOptions = {
  plugins: ['jsx'],
  sourceType: 'module',
};

-const isImport = (child) => child.type === 'import';
-const hasImports = (index) => index > -1;
-const isExport = (child) => child.type === 'export';
-const isTarget = (child, name) => {
+const isImport = (child: Node): child is Literal => child.type === 'import';
+const hasImports = (index: number) => index > -1;
+const isExport = (child: Node): child is Literal => child.type === 'export';
+
+interface PluginOptions {
+  name?: string;
+}
+
+const isTarget = (child: Literal, name: string) => {
  let found = false;
  const ast = parse(child.value, parseOptions);
  traverse(ast, {
    VariableDeclarator: (path) => {
-      if (path.node.id.name === name) {
+      if ((path.node.id as Identifier).name === name) {
        found = true;
      }
    },
  });
  return found;
};

-const getOrCreateExistingTargetIndex = (children, name) => {
+const getOrCreateExistingTargetIndex = (children: Node[], name: string) => {
  let importsIndex = -1;
  let targetIndex = -1;
@@ -58,12 +66,12 @@ const getOrCreateExistingTargetIndex = (children, name) => {
  return targetIndex;
};

-const plugin = (options = {}) => {
+const plugin: Plugin<[PluginOptions?]> = (options = {}) => {
  const name = options.name || 'toc';

-  const transformer = (node) => {
+  const transformer: Transformer = (node) => {
    const headings = search(node);
-    const {children} = node;
+    const {children} = node as Parent<Literal>;
    const targetIndex = getOrCreateExistingTargetIndex(children, name);

    if (headings && headings.length) {
@@ -76,4 +84,4 @@ const plugin = (options = {}) => {
  return transformer;
};

-module.exports = plugin;
+export default plugin;
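What the plugin ultimately injects is an `export const toc` node whose value is the stringified heading list returned by `search`. A rough sketch of the export spliced into the MDX module scope, assuming a document with two h2 headings (names are illustrative):

```ts
// Sketch of the injected export for "## Installation" followed by
// "## Configuration" (illustrative headings):
export const toc = [
  {
    value: 'Installation',
    id: 'installation',
    children: [],
  },
  {
    value: 'Configuration',
    id: 'configuration',
    children: [],
  },
];
```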

@@ -1,73 +0,0 @@
-/**
- * Copyright (c) Facebook, Inc. and its affiliates.
- *
- * This source code is licensed under the MIT license found in the
- * LICENSE file in the root directory of this source tree.
- */
-
-// @ts-check
-
-const toString = require('mdast-util-to-string');
-const visit = require('unist-util-visit');
-// Destructuring require tslib
-// eslint-disable-next-line prefer-destructuring
-const toValue = require('../utils').toValue;
-
-/** @typedef {import('@docusaurus/types').TOCItem} TOC */
-/** @typedef {import('unist').Node} Node */
-
-/**
- * @typedef {Object} StringValuedNode
- * @property {string} type
- * @property {string} value
- * @property {number} depth
- * @property {Object} data
- * @property {StringValuedNode[]} children
- */
-
-// Visit all headings. We `slug` all headings (to account for
-// duplicates), but only take h2 and h3 headings.
-/**
- * @param {StringValuedNode} node
- * @returns {TOC[]}
- */
-function search(node) {
-  /** @type {TOC[]} */
-  const headings = [];
-  let current = -1;
-  let currentDepth = 0;
-
-  /**
-   * @param {StringValuedNode} child
-   * @param {number} index
-   * @param {Node | undefined} parent
-   * @returns {void}
-   */
-  const onHeading = (child, index, parent) => {
-    const value = toString(child);
-
-    if (parent !== node || !value || child.depth > 3 || child.depth < 2) {
-      return;
-    }
-
-    const entry = {
-      value: toValue(child),
-      id: child.data.id,
-      children: [],
-    };
-
-    if (!headings.length || currentDepth >= child.depth) {
-      headings.push(entry);
-      current += 1;
-      currentDepth = child.depth;
-    } else {
-      headings[current].children.push(entry);
-    }
-  };
-
-  visit(node, 'heading', onHeading);
-
-  return headings;
-}
-
-module.exports = search;

@@ -0,0 +1,47 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import toString from 'mdast-util-to-string';
+import visit, {Visitor} from 'unist-util-visit';
+import {toValue} from '../utils';
+import type {TOCItem as TOC} from '@docusaurus/types';
+import type {Node} from 'unist';
+import type {Heading} from 'mdast';
+
+// Visit all headings. We `slug` all headings (to account for
+// duplicates), but only take h2 and h3 headings.
+export default function search(node: Node): TOC[] {
+  const headings: TOC[] = [];
+  let current = -1;
+  let currentDepth = 0;
+
+  const visitor: Visitor<Heading> = (child, _index, parent) => {
+    const value = toString(child);
+
+    if (parent !== node || !value || child.depth > 3 || child.depth < 2) {
+      return;
+    }
+
+    const entry: TOC = {
+      value: toValue(child),
+      id: child.data!.id as string,
+      children: [],
+    };
+
+    if (!headings.length || currentDepth >= child.depth) {
+      headings.push(entry);
+      current += 1;
+      currentDepth = child.depth;
+    } else {
+      headings[current].children.push(entry);
+    }
+  };
+
+  visit(node, 'heading', visitor);
+
+  return headings;
+}
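The depth bookkeeping is easiest to follow with a concrete heading sequence. Assuming the headings plugin has already attached ids, a sketch of the result (heading names are illustrative):

```ts
// For the heading sequence
//   ## Guides
//   ### Writing docs
//   ## API
// search() returns roughly the following: h1 and h4+ are skipped by the
// depth check, and the h3 is nested under the preceding h2's children.
const toc = [
  {
    value: 'Guides',
    id: 'guides',
    children: [{value: 'Writing docs', id: 'writing-docs', children: []}],
  },
  {value: 'API', id: 'api', children: []},
];
```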

@@ -5,26 +5,35 @@
 * LICENSE file in the root directory of this source tree.
 */

-const visit = require('unist-util-visit');
-const path = require('path');
-const url = require('url');
-const fs = require('fs-extra');
-const escapeHtml = require('escape-html');
-const {getFileLoaderUtils} = require('@docusaurus/core/lib/webpack/utils');
-const {
+import visit from 'unist-util-visit';
+import path from 'path';
+import url from 'url';
+import fs from 'fs-extra';
+import escapeHtml from 'escape-html';
+import {getFileLoaderUtils} from '@docusaurus/core/lib/webpack/utils';
+import {
  posixPath,
  escapePath,
  toMessageRelativeFilePath,
-} = require('@docusaurus/utils');
+} from '@docusaurus/utils';
+import type {Plugin, Transformer} from 'unified';
+import type {Image, Literal} from 'mdast';

const {
  loaders: {inlineMarkdownImageFileLoader},
} = getFileLoaderUtils();

-const createJSX = (node, pathUrl) => {
+interface PluginOptions {
+  filePath: string;
+  staticDir: string;
+}
+
+const createJSX = (node: Image, pathUrl: string) => {
  const jsxNode = node;
-  jsxNode.type = 'jsx';
-  jsxNode.value = `<img ${node.alt ? `alt={"${escapeHtml(node.alt)}"} ` : ''}${
+  ((jsxNode as unknown) as Literal).type = 'jsx';
+  ((jsxNode as unknown) as Literal).value = `<img ${
+    node.alt ? `alt={"${escapeHtml(node.alt)}"} ` : ''
+  }${
    node.url
      ? `src={require("${inlineMarkdownImageFileLoader}${escapePath(
          pathUrl,
@@ -33,7 +42,7 @@ const createJSX = (node, pathUrl) => {
  }${node.title ? ` title="${escapeHtml(node.title)}"` : ''} />`;

  if (jsxNode.url) {
-    delete jsxNode.url;
+    delete (jsxNode as Partial<Image>).url;
  }

  if (jsxNode.alt) {
    delete jsxNode.alt;
@@ -43,7 +52,7 @@ const createJSX = (node, pathUrl) => {
  }
};

-async function ensureImageFileExist(imagePath, sourceFilePath) {
+async function ensureImageFileExist(imagePath: string, sourceFilePath: string) {
  const imageExists = await fs.pathExists(imagePath);
  if (!imageExists) {
    throw new Error(
@@ -54,7 +63,10 @@ async function ensureImageFileExist(imagePath, sourceFilePath) {
  }
}

-async function processImageNode(node, {filePath, staticDir}) {
+async function processImageNode(
+  node: Image,
+  {filePath, staticDir}: PluginOptions,
+) {
  if (!node.url) {
    throw new Error(
      `Markdown image URL is mandatory in "${toMessageRelativeFilePath(
@@ -71,8 +83,6 @@ async function processImageNode(node, {filePath, staticDir}) {
    // it's mostly to make next release less risky (2.0.0-alpha.59)
    if (parsedUrl.protocol === 'pathname:') {
      node.url = node.url.replace('pathname://', '');
-    } else {
-      // noop
    }
  }
  // images without protocol
@@ -92,10 +102,10 @@ async function processImageNode(node, {filePath, staticDir}) {
  }
}

-const plugin = (options) => {
-  const transformer = async (root) => {
-    const promises = [];
-    visit(root, 'image', (node) => {
+const plugin: Plugin<[PluginOptions]> = (options) => {
+  const transformer: Transformer = async (root) => {
+    const promises: Promise<void>[] = [];
+    visit(root, 'image', (node: Image) => {
      promises.push(processImageNode(node, options));
    });
    await Promise.all(promises);
@@ -103,4 +113,4 @@ const plugin = (options) => {
  return transformer;
};

-module.exports = plugin;
+export default plugin;
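With `Plugin<[PluginOptions]>`, `filePath` and `staticDir` are now required and type-checked at registration time. A minimal registration sketch (the remark pipeline, import path, and paths are illustrative; in Docusaurus the options come from the site and the document being processed):

```ts
import remark from 'remark';
import transformImage from './transformImage'; // path assumed

// Registering the typed plugin: forgetting staticDir or filePath
// is now a compile-time error instead of a runtime surprise.
const processor = remark().use(transformImage, {
  staticDir: '/path/to/site/static', // illustrative
  filePath: '/path/to/site/docs/intro.md', // illustrative
});
```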

@@ -5,25 +5,34 @@
 * LICENSE file in the root directory of this source tree.
 */

-const {
+import {
  toMessageRelativeFilePath,
  posixPath,
  escapePath,
-} = require('@docusaurus/utils');
-
-const visit = require('unist-util-visit');
-const path = require('path');
-const url = require('url');
-const fs = require('fs-extra');
-const escapeHtml = require('escape-html');
-const {toValue} = require('../utils');
-const {getFileLoaderUtils} = require('@docusaurus/core/lib/webpack/utils');
+} from '@docusaurus/utils';
+import visit from 'unist-util-visit';
+import path from 'path';
+import url from 'url';
+import fs from 'fs-extra';
+import escapeHtml from 'escape-html';
+import {stringifyContent} from '../utils';
+import {getFileLoaderUtils} from '@docusaurus/core/lib/webpack/utils';
+import type {Plugin, Transformer} from 'unified';
+import type {Link, Literal} from 'mdast';

const {
  loaders: {inlineMarkdownLinkFileLoader},
} = getFileLoaderUtils();

-async function ensureAssetFileExist(fileSystemAssetPath, sourceFilePath) {
+interface PluginOptions {
+  filePath: string;
+  staticDir: string;
+}
+
+async function ensureAssetFileExist(
+  fileSystemAssetPath: string,
+  sourceFilePath: string,
+) {
  const assetExists = await fs.pathExists(fileSystemAssetPath);
  if (!assetExists) {
    throw new Error(
@@ -35,7 +44,15 @@ async function ensureAssetFileExist(fileSystemAssetPath, sourceFilePath) {
}

// transform the link node to a jsx link with a require() call
-function toAssetRequireNode({node, filePath, requireAssetPath}) {
+function toAssetRequireNode({
+  node,
+  filePath,
+  requireAssetPath,
+}: {
+  node: Link;
+  filePath: string;
+  requireAssetPath: string;
+}) {
  /* eslint-disable no-param-reassign */
  let relativeRequireAssetPath = posixPath(
@@ -50,17 +67,21 @@ function toAssetRequireNode({node, filePath, requireAssetPath}) {
  const href = `require('${inlineMarkdownLinkFileLoader}${escapePath(
    relativeRequireAssetPath,
  )}').default`;
-  const children = (node.children || []).map((n) => toValue(n)).join('');
+  const children = stringifyContent(node);
  const title = node.title ? `title="${escapeHtml(node.title)}"` : '';

-  node.type = 'jsx';
-  node.value = `<a target="_blank" href={${href}}${title}>${children}</a>`;
+  ((node as unknown) as Literal).type = 'jsx';
+  ((node as unknown) as Literal).value = `<a target="_blank" href={${href}}${title}>${children}</a>`;
}

// If the link looks like an asset link, we'll link to the asset,
// and use a require("assetUrl") (using webpack url-loader/file-loader)
// instead of navigating to such link
-async function convertToAssetLinkIfNeeded({node, staticDir, filePath}) {
+async function convertToAssetLinkIfNeeded({
+  node,
+  staticDir,
+  filePath,
+}: {node: Link} & PluginOptions) {
  const assetPath = node.url;

  const hasSiteAlias = assetPath.startsWith('@site/');
@@ -73,7 +94,7 @@ async function convertToAssetLinkIfNeeded({node, staticDir, filePath}) {
    return;
  }

-  function toAssetLinkNode(requireAssetPath) {
+  function toAssetLinkNode(requireAssetPath: string) {
    toAssetRequireNode({
      node,
      filePath,
@@ -102,14 +123,16 @@ async function convertToAssetLinkIfNeeded({node, staticDir, filePath}) {
  }
}

-async function processLinkNode({node, _index, _parent, filePath, staticDir}) {
+async function processLinkNode({
+  node,
+  filePath,
+  staticDir,
+}: {node: Link} & PluginOptions) {
  if (!node.url) {
    // try to improve error feedback
    // see https://github.com/facebook/docusaurus/issues/3309#issuecomment-690371675
-    const title =
-      node.title || (node.children[0] && node.children[0].value) || '?';
-    const line =
-      (node.position && node.position.start && node.position.start.line) || '?';
+    const title = node.title || (node.children[0] as Literal)?.value || '?';
+    const line = node?.position?.start?.line || '?';
    throw new Error(
      `Markdown link URL is mandatory in "${toMessageRelativeFilePath(
        filePath,
@@ -122,22 +145,18 @@ async function processLinkNode({node, _index, _parent, filePath, staticDir}) {
    return;
  }

-  await convertToAssetLinkIfNeeded({
-    node,
-    staticDir,
-    filePath,
-  });
+  await convertToAssetLinkIfNeeded({node, staticDir, filePath});
}

-const plugin = (options) => {
-  const transformer = async (root) => {
-    const promises = [];
-    visit(root, 'link', (node, index, parent) => {
-      promises.push(processLinkNode({node, index, parent, ...options}));
+const plugin: Plugin<[PluginOptions]> = (options) => {
+  const transformer: Transformer = async (root) => {
+    const promises: Promise<void>[] = [];
+    visit(root, 'link', (node: Link) => {
+      promises.push(processLinkNode({node, ...options}));
    });
    await Promise.all(promises);
  };

  return transformer;
};

-module.exports = plugin;
+export default plugin;

@@ -5,24 +5,26 @@
 * LICENSE file in the root directory of this source tree.
 */

-const visit = require('unist-util-visit');
+import visit from 'unist-util-visit';
+import type {Transformer, Processor} from 'unified';
+import type {Code, Parent} from 'mdast';

// This plugin is mostly to help integrating Docusaurus with translation systems
// that do not support well MDX embedded JSX syntax (like Crowdin)
// We wrap the JSX syntax in code blocks so that translation tools don't mess-up with the markup
// But the JSX inside such code blocks should still be evaluated as JSX
// See https://github.com/facebook/docusaurus/pull/4278
-function plugin() {
-  const transformer = (root) => {
-    visit(root, 'code', (node, _index, parent) => {
+function plugin(this: Processor): Transformer {
+  const transformer: Transformer = (root) => {
+    visit(root, 'code', (node: Code, _index, parent) => {
      if (node.lang === 'mdx-code-block') {
-        const newChildrens = this.parse(node.value).children;
+        const newChildren = (this!.parse(node.value) as Parent).children;

        // Replace the mdx code block by its content, parsed
-        parent.children.splice(
-          parent.children.indexOf(node),
+        parent!.children.splice(
+          parent!.children.indexOf(node),
          1,
-          ...newChildrens,
+          ...newChildren,
        );
      }
    });
@@ -31,4 +33,4 @@ function plugin() {
  return transformer;
}

-module.exports = plugin;
+export default plugin;
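A quick way to see the unwrap step is to run the plugin through `remark` + `remark-mdx` (both already in this package's devDependencies); the fenced `mdx-code-block` is parsed back into inline JSX. The import path and component name are assumptions for this sketch:

```ts
import remark from 'remark';
import mdx from 'remark-mdx';
import unwrapMdxCodeBlocks from './unwrapMdxCodeBlocks'; // path assumed

const doc = ['```mdx-code-block', '<MyComponent prop="value" />', '```'].join('\n');

// The code fence keeps translation tools away from the JSX markup, and the
// plugin splices the parsed JSX back in place of the code node.
remark()
  .use(mdx)
  .use(unwrapMdxCodeBlocks)
  .process(doc)
  .then((file) => console.log(String(file))); // roughly: <MyComponent prop="value" />
```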

@@ -1,39 +0,0 @@
-/**
- * Copyright (c) Facebook, Inc. and its affiliates.
- *
- * This source code is licensed under the MIT license found in the
- * LICENSE file in the root directory of this source tree.
- */
-
-const escapeHtml = require('escape-html');
-const toString = require('mdast-util-to-string');
-
-/**
- * @param {StringValuedNode | undefined} node
- * @returns {string}
- */
-function toValue(node) {
-  if (node && node.type) {
-    switch (node.type) {
-      case 'text':
-        return escapeHtml(node.value);
-      case 'heading':
-        return node.children.map(toValue).join('');
-      case 'inlineCode':
-        return `<code>${escapeHtml(node.value)}</code>`;
-      case 'emphasis':
-        return `<em>${node.children.map(toValue).join('')}</em>`;
-      case 'strong':
-        return `<strong>${node.children.map(toValue).join('')}</strong>`;
-      case 'delete':
-        return `<del>${node.children.map(toValue).join('')}</del>`;
-      default:
-    }
-  }
-
-  return toString(node);
-}
-
-module.exports = {
-  toValue,
-};

@@ -0,0 +1,39 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import escapeHtml from 'escape-html';
+import toString from 'mdast-util-to-string';
+import type {Parent} from 'unist';
+import type {StaticPhrasingContent, Heading} from 'mdast';
+
+export function stringifyContent(node: Parent): string {
+  return ((node.children || []) as StaticPhrasingContent[])
+    .map(toValue)
+    .join('');
+}
+
+export function toValue(node: StaticPhrasingContent | Heading): string {
+  if (node && node.type) {
+    switch (node.type) {
+      case 'text':
+        return escapeHtml(node.value);
+      case 'heading':
+        return stringifyContent(node);
+      case 'inlineCode':
+        return `<code>${escapeHtml(node.value)}</code>`;
+      case 'emphasis':
+        return `<em>${stringifyContent(node)}</em>`;
+      case 'strong':
+        return `<strong>${stringifyContent(node)}</strong>`;
+      case 'delete':
+        return `<del>${stringifyContent(node)}</del>`;
+      default:
+    }
+  }
+
+  return toString(node);
+}
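A small sketch of what `toValue` produces for a heading that mixes text, inline code, and emphasis (the heading content is illustrative):

```ts
import type {Heading} from 'mdast';
import {toValue} from './utils'; // path assumed

// mdast fragment for: ## Install `docusaurus` *quickly*
const heading: Heading = {
  type: 'heading',
  depth: 2,
  children: [
    {type: 'text', value: 'Install '},
    {type: 'inlineCode', value: 'docusaurus'},
    {type: 'text', value: ' '},
    {type: 'emphasis', children: [{type: 'text', value: 'quickly'}]},
  ],
};

// => 'Install <code>docusaurus</code> <em>quickly</em>'
console.log(toValue(heading));
```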

@@ -0,0 +1,38 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+declare module '@docusaurus/mdx-loader' {
+  export interface RemarkAndRehypePluginOptions {
+    remarkPlugins: RemarkOrRehypePlugin[];
+    rehypePlugins: string[];
+    beforeDefaultRemarkPlugins: RemarkOrRehypePlugin[];
+    beforeDefaultRehypePlugins: RemarkOrRehypePlugin[];
+  }
+}
+
+// TODO Types provided by MDX 2.0 https://github.com/mdx-js/mdx/blob/main/packages/mdx/types/index.d.ts
+declare module '@mdx-js/mdx' {
+  import type {Plugin, Processor} from 'unified';
+
+  export namespace mdx {
+    interface Options {
+      filepath?: string;
+      skipExport?: boolean;
+      wrapExport?: string;
+      remarkPlugins?: Plugin[];
+      rehypePlugins?: Plugin[];
+    }
+
+    function sync(content: string, options?: Options): string;
+    function createMdxAstCompiler(options?: Options): Processor;
+    function createCompiler(options?: Options): Processor;
+  }
+
+  export default function mdx(
+    content: string,
+    options?: mdx.Options,
+  ): Promise<string>;
+}
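With these ambient declarations, both the loader options and the `@mdx-js/mdx` default export are usable from typed code without casts. A minimal consumption sketch (the option values are illustrative):

```ts
import mdx from '@mdx-js/mdx';
import type {RemarkAndRehypePluginOptions} from '@docusaurus/mdx-loader';

// Typed through `declare module '@docusaurus/mdx-loader'` above.
const pluginOptions: Partial<RemarkAndRehypePluginOptions> = {
  beforeDefaultRemarkPlugins: [],
  beforeDefaultRehypePlugins: [],
};

// Typed through `declare module '@mdx-js/mdx'` above:
// mdx() resolves to the compiled JSX string.
async function compile(content: string): Promise<string> {
  return mdx(content, {skipExport: true});
}

compile('# Hello').then((jsx) => console.log(jsx, pluginOptions));
```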

@@ -1,9 +1,10 @@
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
-    "allowJs": true,
-    "noEmit": true,
-    "noImplicitAny": false
+    "incremental": true,
+    "tsBuildInfoFile": "./lib/.tsbuildinfo",
+    "rootDir": "src",
+    "outDir": "lib"
  },
  "include": ["src/"]
}

@@ -13,6 +13,7 @@ import visit from 'unist-util-visit';
import remarkStringify from 'remark-stringify';
import htmlTags from 'html-tags';
import toText from 'hast-util-to-string';
+import type {Code, InlineCode} from 'mdast';

const tags = htmlTags.reduce((acc: {[key: string]: boolean}, tag) => {
  acc[tag] = true;
@@ -21,10 +22,10 @@ const tags = htmlTags.reduce((acc: {[key: string]: boolean}, tag) => {
export default function sanitizeMD(code: string): string {
  const markdownTree = unified().use(markdown).parse(code);
-  visit(markdownTree, 'code', (node) => {
+  visit(markdownTree, 'code', (node: Code) => {
    node.value = `\n<!--${node.value}-->\n`;
  });
-  visit(markdownTree, 'inlineCode', (node) => {
+  visit(markdownTree, 'inlineCode', (node: InlineCode) => {
    node.value = `<!--${node.value}-->`;
  });
@@ -33,7 +34,7 @@ export default function sanitizeMD(code: string): string {
    .stringify(markdownTree);

  const htmlTree = unified().use(parse).parse(markdownString);
-  visit(htmlTree, 'element', (node) => {
+  visit(htmlTree, 'element', (node: any) => {
    if (!tags[node.tagName as string]) {
      node.type = 'text';
      node.value = node.tagName + toText(node);

@@ -42,7 +42,9 @@ export default function htmlTagObjectToString(tagDefinition: unknown): string {
    if (tagAttributes[attributeName] === true) {
      return attributeName;
    }
-    return `${attributeName}="${escapeHTML(tagAttributes[attributeName])}"`;
+    return `${attributeName}="${escapeHTML(
+      tagAttributes[attributeName] as string,
+    )}"`;
  });
  return `<${[tagDefinition.tagName].concat(attributes).join(' ')}>${
    (!isVoidTag && tagDefinition.innerHTML) || ''

@@ -3994,6 +3994,11 @@
    "@types/got" "^8"
    "@types/node" "*"

+"@types/escape-html@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@types/escape-html/-/escape-html-1.0.1.tgz#b19b4646915f0ae2c306bf984dc0a59c5cfc97ba"
+  integrity sha512-4mI1FuUUZiuT95fSVqvZxp/ssQK9zsa86S43h9x3zPOSU9BBJ+BfDkXwuaU7BfsD+e7U0/cUUfJFk3iW2M4okA==
+
"@types/eslint-scope@^3.7.0":
  version "3.7.0"
  resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.0.tgz#4792816e31119ebd506902a482caec4951fabd86"
@@ -4208,10 +4213,10 @@
  resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.170.tgz#0d67711d4bf7f4ca5147e9091b847479b87925d6"
  integrity sha512-bpcvu/MKHHeYX+qeEN8GE7DIravODWdACVA1ctevD8CN24RhPZIKMn9ntfAsrvLfSX3cR5RrBKAbYm9bGs0A+Q==

-"@types/mdast@^3.0.0":
-  version "3.0.3"
-  resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.3.tgz#2d7d671b1cd1ea3deb306ea75036c2a0407d2deb"
-  integrity sha512-SXPBMnFVQg1s00dlMCc/jCdvPqdE4mXaMMCeRlxLDmTAEoegHT53xKtkDnzDTOcmMHUfcjyf36/YYZ6SxRdnsw==
+"@types/mdast@^3.0.0", "@types/mdast@^3.0.7":
+  version "3.0.7"
+  resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.7.tgz#cba63d0cc11eb1605cea5c0ad76e02684394166b"
+  integrity sha512-YwR7OK8aPmaBvMMUi+pZXBNoW2unbVbfok4YRqGMJBe1dpDlzpRkJrYEYmvjxgs5JhuQmKfDexrN98u941Zasg==
  dependencies:
    "@types/unist" "*"
@@ -4462,6 +4467,11 @@
  resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.0.tgz#7036640b4e21cc2f259ae826ce843d277dad8cff"
  integrity sha512-RJJrrySY7A8havqpGObOB4W92QXKJo63/jFLLgpvOtsGUqbQZ9Sbgl35KMm1DjC6j7AvmmU2bIno+3IyEaemaw==

+"@types/stringify-object@^3.3.1":
+  version "3.3.1"
+  resolved "https://registry.yarnpkg.com/@types/stringify-object/-/stringify-object-3.3.1.tgz#9ee394931e63468de0412a8e19c9f021a7d1d24d"
+  integrity sha512-bpCBW0O+QrMLNFBY/+rkZtGzcYRmc2aTD8qYHOMNUmednqETfEZtFcGEA11l9xqbIeiT1PgXG0eq3zqayVzZSQ==
+
"@types/tapable@^1":
  version "1.0.7"
  resolved "https://registry.yarnpkg.com/@types/tapable/-/tapable-1.0.7.tgz#545158342f949e8fd3bfd813224971ecddc3fac4"
@@ -4474,10 +4484,10 @@
  dependencies:
    source-map "^0.6.1"

-"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3":
-  version "2.0.3"
-  resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e"
-  integrity sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ==
+"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3", "@types/unist@^2.0.6":
+  version "2.0.6"
+  resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.6.tgz#250a7b16c3b91f672a24552ec64678eeb1d3a08d"
+  integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==

"@types/wait-on@^5.2.0":
  version "5.3.0"