Mirror of https://github.com/facebook/docusaurus.git (synced 2025-04-29 10:17:55 +02:00)
chore(mdx-loader): migrate package to TypeScript (#5347)
* Polish code style Signed-off-by: Josh-Cena <sidachen2003@gmail.com>
* Partly done migration Signed-off-by: Josh-Cena <sidachen2003@gmail.com>
* Complete typing Signed-off-by: Josh-Cena <sidachen2003@gmail.com>
* Fix tests Signed-off-by: Josh-Cena <sidachen2003@gmail.com>
* A-ha Signed-off-by: Josh-Cena <sidachen2003@gmail.com>
* Cleanup Signed-off-by: Josh-Cena <sidachen2003@gmail.com>
* Fix error Signed-off-by: Josh-Cena <sidachen2003@gmail.com>
* Cleanup Signed-off-by: Josh-Cena <sidachen2003@gmail.com>
parent ac4a253cdf
commit 3fc47938a5

27 changed files with 345 additions and 287 deletions
@@ -46,7 +46,7 @@ module.exports = {
     // Ignore certain webpack alias because it can't be resolved
     'import/no-unresolved': [
       ERROR,
-      {ignore: ['^@theme', '^@docusaurus', '^@generated']},
+      {ignore: ['^@theme', '^@docusaurus', '^@generated', 'unist', 'mdast']},
     ],
     'import/extensions': OFF,
     'header/header': [
@@ -2,8 +2,8 @@
   "name": "@docusaurus/mdx-loader",
   "version": "2.0.0-beta.4",
   "description": "Docusaurus Loader for MDX",
-  "main": "src/index.js",
-  "types": "src/index.d.ts",
+  "main": "lib/index.js",
+  "types": "src/types.d.ts",
   "publishConfig": {
     "access": "public"
   },
@@ -39,6 +39,10 @@
   },
   "devDependencies": {
     "@docusaurus/types": "2.0.0-beta.4",
+    "@types/escape-html": "^1.0.1",
+    "@types/mdast": "^3.0.7",
+    "@types/stringify-object": "^3.3.1",
+    "@types/unist": "^2.0.6",
     "remark": "^12.0.0",
     "remark-mdx": "^1.6.21",
     "to-vfile": "^6.0.0",
packages/docusaurus-mdx-loader/src/index.d.ts (vendored, 19 lines)
@@ -1,19 +0,0 @@
-/**
- * Copyright (c) Facebook, Inc. and its affiliates.
- *
- * This source code is licensed under the MIT license found in the
- * LICENSE file in the root directory of this source tree.
- */
-
-type RemarkOrRehypePlugin =
-  // eslint-disable-next-line @typescript-eslint/ban-types
-  [Function, Record<string, unknown>] | Function;
-
-declare function docusaurusMdxLoader(fileString: string): string;
-
-export interface RemarkAndRehypePluginOptions {
-  remarkPlugins: RemarkOrRehypePlugin[];
-  rehypePlugins: string[];
-  beforeDefaultRemarkPlugins: RemarkOrRehypePlugin[];
-  beforeDefaultRehypePlugins: RemarkOrRehypePlugin[];
-}
@@ -5,36 +5,45 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-const {readFile} = require('fs-extra');
-const mdx = require('@mdx-js/mdx');
-const chalk = require('chalk');
-const emoji = require('remark-emoji');
-const {
+import {readFile} from 'fs-extra';
+import mdx from '@mdx-js/mdx';
+import chalk from 'chalk';
+import emoji from 'remark-emoji';
+import {
   parseFrontMatter,
   parseMarkdownContentTitle,
-} = require('@docusaurus/utils');
-const stringifyObject = require('stringify-object');
-const headings = require('./remark/headings');
-const toc = require('./remark/toc');
-const unwrapMdxCodeBlocks = require('./remark/unwrapMdxCodeBlocks');
-const transformImage = require('./remark/transformImage');
-const transformLinks = require('./remark/transformLinks');
-const {escapePath} = require('@docusaurus/utils');
-const {getFileLoaderUtils} = require('@docusaurus/core/lib/webpack/utils');
+  escapePath,
+} from '@docusaurus/utils';
+import stringifyObject from 'stringify-object';
+import headings from './remark/headings';
+import toc from './remark/toc';
+import unwrapMdxCodeBlocks from './remark/unwrapMdxCodeBlocks';
+import transformImage from './remark/transformImage';
+import transformLinks from './remark/transformLinks';
+import {getFileLoaderUtils} from '@docusaurus/core/lib/webpack/utils';
+import type {RemarkAndRehypePluginOptions} from '@docusaurus/mdx-loader';
+
+// TODO temporary until Webpack5 export this type
+// see https://github.com/webpack/webpack/issues/11630
+interface Loader extends Function {
+  (this: any, source: string): Promise<string | Buffer | void | undefined>;
+}
 
 const {
   loaders: {inlineMarkdownImageFileLoader},
 } = getFileLoaderUtils();
 
-const DEFAULT_OPTIONS = {
+const DEFAULT_OPTIONS: RemarkAndRehypePluginOptions = {
   rehypePlugins: [],
   remarkPlugins: [unwrapMdxCodeBlocks, emoji, headings, toc],
   beforeDefaultRemarkPlugins: [],
   beforeDefaultRehypePlugins: [],
 };
 
 // When this throws, it generally means that there's no metadata file associated with this MDX document
 // It can happen when using MDX partials (usually starting with _)
 // That's why it's important to provide the "isMDXPartial" function in config
-async function readMetadataPath(metadataPath) {
+async function readMetadataPath(metadataPath: string) {
   try {
     return await readFile(metadataPath, 'utf8');
   } catch (e) {
@@ -48,15 +57,14 @@ async function readMetadataPath(metadataPath) {
 // We don't do that for all frontMatters, only for the configured keys
 // {image: "./myImage.png"} => {image: require("./myImage.png")}
 function createFrontMatterAssetsExportCode(
-  filePath,
-  frontMatter,
-  frontMatterAssetKeys = [],
+  frontMatter: Record<string, unknown>,
+  frontMatterAssetKeys: string[] = [],
 ) {
   if (frontMatterAssetKeys.length === 0) {
     return 'undefined';
   }
 
-  function createFrontMatterAssetRequireCode(value) {
+  function createFrontMatterAssetRequireCode(value: unknown) {
     // Only process string values starting with ./
     // We could enhance this logic and check if file exists on disc?
     if (typeof value === 'string' && value.startsWith('./')) {
@@ -84,7 +92,7 @@ function createFrontMatterAssetsExportCode(
   return exportValue;
 }
 
-module.exports = async function docusaurusMdxLoader(fileString) {
+const docusaurusMdxLoader: Loader = async function (fileString) {
   const callback = this.async();
   const filePath = this.resourcePath;
   const reqOptions = this.getOptions() || {};
@@ -122,35 +130,25 @@ module.exports = async function docusaurusMdxLoader(fileString) {
     return callback(err);
   }
 
-  let exportStr = ``;
-  exportStr += `\nexport const frontMatter = ${stringifyObject(frontMatter)};`;
-  exportStr += `\nexport const frontMatterAssets = ${createFrontMatterAssetsExportCode(
-    filePath,
+  let exportStr = `
+export const frontMatter = ${stringifyObject(frontMatter)};
+export const frontMatterAssets = ${createFrontMatterAssetsExportCode(
     frontMatter,
     reqOptions.frontMatterAssetKeys,
-  )};`;
-  exportStr += `\nexport const contentTitle = ${stringifyObject(
-    contentTitle,
-  )};`;
+  )};
+export const contentTitle = ${stringifyObject(contentTitle)};`;
 
   // MDX partials are MDX files starting with _ or in a folder starting with _
   // Partial are not expected to have an associated metadata file or frontmatter
-  const isMDXPartial = options.isMDXPartial
-    ? options.isMDXPartial(filePath)
-    : false;
+  const isMDXPartial = options.isMDXPartial && options.isMDXPartial(filePath);
 
   if (isMDXPartial && hasFrontMatter) {
     const errorMessage = `Docusaurus MDX partial files should not contain FrontMatter.
 Those partial files use the _ prefix as a convention by default, but this is configurable.
-File at ${filePath} contains FrontMatter that will be ignored: \n${JSON.stringify(
-      frontMatter,
-      null,
-      2,
-    )}`;
+File at ${filePath} contains FrontMatter that will be ignored:
+${JSON.stringify(frontMatter, null, 2)}`;
 
-    if (options.isMDXPartialFrontMatterWarningDisabled === true) {
-      // no warning
-    } else {
+    if (!options.isMDXPartialFrontMatterWarningDisabled) {
       const shouldError = process.env.NODE_ENV === 'test' || process.env.CI;
       if (shouldError) {
         return callback(new Error(errorMessage));
@@ -176,12 +174,14 @@ File at ${filePath} contains FrontMatter that will be ignored: \n${JSON.stringif
   }
 
   const code = `
-  import React from 'react';
-  import { mdx } from '@mdx-js/react';
+import React from 'react';
+import { mdx } from '@mdx-js/react';
 
-  ${exportStr}
-  ${result}
-  `;
+${exportStr}
+${result}
+`;
 
   return callback(null, code);
 };
+
+export default docusaurusMdxLoader;
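A minimal sketch (not part of the diff), assuming a standard webpack setup, of how the compiled loader referenced by the new "main": "lib/index.js" entry could be wired into a rule; the option keys mirror the RemarkAndRehypePluginOptions interface typed above.

// Hypothetical consumer-side rule; paths and plugin lists are illustrative only.
const mdxRule = {
  test: /\.mdx?$/,
  use: [
    {
      // resolves to lib/index.js, i.e. the compiled default-exported Loader
      loader: require.resolve('@docusaurus/mdx-loader'),
      options: {
        remarkPlugins: [],
        rehypePlugins: [],
        beforeDefaultRemarkPlugins: [],
        beforeDefaultRehypePlugins: [],
      },
    },
  ],
};

export default mdxRule;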
@@ -7,18 +7,25 @@
 /* Based on remark-slug (https://github.com/remarkjs/remark-slug) and gatsby-remark-autolink-headers (https://github.com/gatsbyjs/gatsby/blob/master/packages/gatsby-remark-autolink-headers) */
 
-const {parseMarkdownHeadingId} = require('@docusaurus/utils');
-const visit = require('unist-util-visit');
-const toString = require('mdast-util-to-string');
-const slugs = require('github-slugger')();
+import {parseMarkdownHeadingId} from '@docusaurus/utils';
+import visit, {Visitor} from 'unist-util-visit';
+import toString from 'mdast-util-to-string';
+import Slugger from 'github-slugger';
+import type {Transformer} from 'unified';
+import type {Parent} from 'unist';
+import type {Heading, Text} from 'mdast';
 
-function headings() {
-  const transformer = (ast) => {
+const slugs = new Slugger();
+
+function headings(): Transformer {
+  const transformer: Transformer = (ast) => {
     slugs.reset();
 
-    function visitor(headingNode) {
-      const data = headingNode.data || (headingNode.data = {}); // eslint-disable-line
-      const properties = data.hProperties || (data.hProperties = {});
+    const visitor: Visitor<Heading> = (headingNode) => {
+      const data = headingNode.data || (headingNode.data = {});
+      const properties = (data.hProperties || (data.hProperties = {})) as {
+        id: string;
+      };
       let {id} = properties;
 
       if (id) {
@@ -29,7 +36,7 @@ function headings() {
         );
         const heading = toString(
           headingTextNodes.length > 0
-            ? {children: headingTextNodes}
+            ? ({children: headingTextNodes} as Parent)
             : headingNode,
         );
 
@@ -42,8 +49,9 @@ function headings() {
           // When there's an id, it is always in the last child node
           // Sometimes heading is in multiple "parts" (** syntax creates a child node):
           // ## part1 *part2* part3 {#id}
-          const lastNode =
-            headingNode.children[headingNode.children.length - 1];
+          const lastNode = headingNode.children[
+            headingNode.children.length - 1
+          ] as Text;
 
           if (headingNode.children.length > 1) {
             const lastNodeText = parseMarkdownHeadingId(lastNode.value).text;
@@ -63,7 +71,7 @@ function headings() {
 
       data.id = id;
       properties.id = id;
-    }
+    };
 
     visit(ast, 'heading', visitor);
   };
@@ -71,4 +79,4 @@ function headings() {
   return transformer;
 }
 
-module.exports = headings;
+export default headings;
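A small usage sketch (not in the diff), assuming the remark v12 / remark-mdx pipeline this package already depends on; the relative import path is illustrative.

import remark from 'remark';
import mdx from 'remark-mdx';
import headings from './headings';

// Parses MDX, lets the headings transformer attach slug ids to heading nodes,
// then re-serializes the document.
async function addHeadingIds(content: string): Promise<string> {
  const result = await remark().use(mdx).use(headings).process(content);
  return result.toString();
}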
@@ -5,34 +5,42 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-const {parse} = require('@babel/parser');
-const traverse = require('@babel/traverse').default;
-const stringifyObject = require('stringify-object');
-const search = require('./search');
+import {parse, ParserOptions} from '@babel/parser';
+import type {Identifier} from '@babel/types';
+import traverse from '@babel/traverse';
+import stringifyObject from 'stringify-object';
+import search from './search';
+import type {Plugin, Transformer} from 'unified';
+import type {Node, Parent} from 'unist';
+import type {Literal} from 'mdast';
 
-const parseOptions = {
+const parseOptions: ParserOptions = {
   plugins: ['jsx'],
   sourceType: 'module',
 };
-const isImport = (child) => child.type === 'import';
-const hasImports = (index) => index > -1;
-const isExport = (child) => child.type === 'export';
 
-const isTarget = (child, name) => {
+const isImport = (child: Node): child is Literal => child.type === 'import';
+const hasImports = (index: number) => index > -1;
+const isExport = (child: Node): child is Literal => child.type === 'export';
+
+interface PluginOptions {
+  name?: string;
+}
+
+const isTarget = (child: Literal, name: string) => {
   let found = false;
   const ast = parse(child.value, parseOptions);
   traverse(ast, {
     VariableDeclarator: (path) => {
-      if (path.node.id.name === name) {
+      if ((path.node.id as Identifier).name === name) {
         found = true;
       }
     },
   });
 
   return found;
 };
 
-const getOrCreateExistingTargetIndex = (children, name) => {
+const getOrCreateExistingTargetIndex = (children: Node[], name: string) => {
   let importsIndex = -1;
   let targetIndex = -1;
 
@@ -58,12 +66,12 @@ const getOrCreateExistingTargetIndex = (children, name) => {
   return targetIndex;
 };
 
-const plugin = (options = {}) => {
+const plugin: Plugin<[PluginOptions?]> = (options = {}) => {
   const name = options.name || 'toc';
 
-  const transformer = (node) => {
+  const transformer: Transformer = (node) => {
     const headings = search(node);
-    const {children} = node;
+    const {children} = node as Parent<Literal>;
     const targetIndex = getOrCreateExistingTargetIndex(children, name);
 
     if (headings && headings.length) {
@@ -76,4 +84,4 @@ const plugin = (options = {}) => {
   return transformer;
 };
 
-module.exports = plugin;
+export default plugin;
@@ -1,73 +0,0 @@
-/**
- * Copyright (c) Facebook, Inc. and its affiliates.
- *
- * This source code is licensed under the MIT license found in the
- * LICENSE file in the root directory of this source tree.
- */
-
-// @ts-check
-
-const toString = require('mdast-util-to-string');
-const visit = require('unist-util-visit');
-// Destructuring require tslib
-// eslint-disable-next-line prefer-destructuring
-const toValue = require('../utils').toValue;
-
-/** @typedef {import('@docusaurus/types').TOCItem} TOC */
-/** @typedef {import('unist').Node} Node */
-
-/**
- * @typedef {Object} StringValuedNode
- * @property {string} type
- * @property {string} value
- * @property {number} depth
- * @property {Object} data
- * @property {StringValuedNode[]} children
- */
-
-// Visit all headings. We `slug` all headings (to account for
-// duplicates), but only take h2 and h3 headings.
-/**
- * @param {StringValuedNode} node
- * @returns {TOC[]}
- */
-function search(node) {
-  /** @type {TOC[]} */
-  const headings = [];
-  let current = -1;
-  let currentDepth = 0;
-
-  /**
-   * @param {StringValuedNode} child
-   * @param {number} index
-   * @param {Node | undefined} parent
-   * @returns {void}
-   */
-  const onHeading = (child, index, parent) => {
-    const value = toString(child);
-
-    if (parent !== node || !value || child.depth > 3 || child.depth < 2) {
-      return;
-    }
-
-    const entry = {
-      value: toValue(child),
-      id: child.data.id,
-      children: [],
-    };
-
-    if (!headings.length || currentDepth >= child.depth) {
-      headings.push(entry);
-      current += 1;
-      currentDepth = child.depth;
-    } else {
-      headings[current].children.push(entry);
-    }
-  };
-
-  visit(node, 'heading', onHeading);
-
-  return headings;
-}
-
-module.exports = search;
packages/docusaurus-mdx-loader/src/remark/toc/search.ts (new file, 47 lines)
@@ -0,0 +1,47 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import toString from 'mdast-util-to-string';
+import visit, {Visitor} from 'unist-util-visit';
+import {toValue} from '../utils';
+import type {TOCItem as TOC} from '@docusaurus/types';
+import type {Node} from 'unist';
+import type {Heading} from 'mdast';
+
+// Visit all headings. We `slug` all headings (to account for
+// duplicates), but only take h2 and h3 headings.
+export default function search(node: Node): TOC[] {
+  const headings: TOC[] = [];
+  let current = -1;
+  let currentDepth = 0;
+
+  const visitor: Visitor<Heading> = (child, _index, parent) => {
+    const value = toString(child);
+
+    if (parent !== node || !value || child.depth > 3 || child.depth < 2) {
+      return;
+    }
+
+    const entry: TOC = {
+      value: toValue(child),
+      id: child.data!.id as string,
+      children: [],
+    };
+
+    if (!headings.length || currentDepth >= child.depth) {
+      headings.push(entry);
+      current += 1;
+      currentDepth = child.depth;
+    } else {
+      headings[current].children.push(entry);
+    }
+  };
+
+  visit(node, 'heading', visitor);
+
+  return headings;
+}
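A hedged sketch (not in the diff) of how search() could be exercised on its own; it assumes the headings transformer has already run so that each heading node carries data.id, and the relative import paths are illustrative.

import remark from 'remark';
import mdx from 'remark-mdx';
import headings from '../headings';
import search from './search';

// Builds the TOC the toc plugin would inject: h2/h3 entries with value/id/children.
function getTOC(content: string) {
  const processor = remark().use(mdx).use(headings);
  const ast = processor.runSync(processor.parse(content));
  return search(ast);
}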
@@ -5,26 +5,35 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-const visit = require('unist-util-visit');
-const path = require('path');
-const url = require('url');
-const fs = require('fs-extra');
-const escapeHtml = require('escape-html');
-const {getFileLoaderUtils} = require('@docusaurus/core/lib/webpack/utils');
-const {
+import visit from 'unist-util-visit';
+import path from 'path';
+import url from 'url';
+import fs from 'fs-extra';
+import escapeHtml from 'escape-html';
+import {getFileLoaderUtils} from '@docusaurus/core/lib/webpack/utils';
+import {
   posixPath,
   escapePath,
   toMessageRelativeFilePath,
-} = require('@docusaurus/utils');
+} from '@docusaurus/utils';
+import type {Plugin, Transformer} from 'unified';
+import type {Image, Literal} from 'mdast';
 
 const {
   loaders: {inlineMarkdownImageFileLoader},
 } = getFileLoaderUtils();
 
-const createJSX = (node, pathUrl) => {
+interface PluginOptions {
+  filePath: string;
+  staticDir: string;
+}
+
+const createJSX = (node: Image, pathUrl: string) => {
   const jsxNode = node;
-  jsxNode.type = 'jsx';
-  jsxNode.value = `<img ${node.alt ? `alt={"${escapeHtml(node.alt)}"} ` : ''}${
+  ((jsxNode as unknown) as Literal).type = 'jsx';
+  ((jsxNode as unknown) as Literal).value = `<img ${
+    node.alt ? `alt={"${escapeHtml(node.alt)}"} ` : ''
+  }${
     node.url
       ? `src={require("${inlineMarkdownImageFileLoader}${escapePath(
           pathUrl,
@@ -33,7 +42,7 @@ const createJSX = (node, pathUrl) => {
   }${node.title ? ` title="${escapeHtml(node.title)}"` : ''} />`;
 
   if (jsxNode.url) {
-    delete jsxNode.url;
+    delete (jsxNode as Partial<Image>).url;
   }
   if (jsxNode.alt) {
     delete jsxNode.alt;
@@ -43,7 +52,7 @@ const createJSX = (node, pathUrl) => {
   }
 };
 
-async function ensureImageFileExist(imagePath, sourceFilePath) {
+async function ensureImageFileExist(imagePath: string, sourceFilePath: string) {
   const imageExists = await fs.pathExists(imagePath);
   if (!imageExists) {
     throw new Error(
@@ -54,7 +63,10 @@ async function ensureImageFileExist(imagePath, sourceFilePath) {
   }
 }
 
-async function processImageNode(node, {filePath, staticDir}) {
+async function processImageNode(
+  node: Image,
+  {filePath, staticDir}: PluginOptions,
+) {
   if (!node.url) {
     throw new Error(
       `Markdown image URL is mandatory in "${toMessageRelativeFilePath(
@@ -71,8 +83,6 @@ async function processImageNode(node, {filePath, staticDir}) {
     // it's mostly to make next release less risky (2.0.0-alpha.59)
     if (parsedUrl.protocol === 'pathname:') {
       node.url = node.url.replace('pathname://', '');
-    } else {
-      // noop
     }
   }
   // images without protocol
@@ -92,10 +102,10 @@ async function processImageNode(node, {filePath, staticDir}) {
   }
 }
 
-const plugin = (options) => {
-  const transformer = async (root) => {
-    const promises = [];
-    visit(root, 'image', (node) => {
+const plugin: Plugin<[PluginOptions]> = (options) => {
+  const transformer: Transformer = async (root) => {
+    const promises: Promise<void>[] = [];
+    visit(root, 'image', (node: Image) => {
      promises.push(processImageNode(node, options));
     });
     await Promise.all(promises);
@@ -103,4 +113,4 @@ const plugin = (options) => {
   return transformer;
 };
 
-module.exports = plugin;
+export default plugin;
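A hedged sketch (not in the diff) of attaching the transformImage plugin to a remark pipeline; staticDir and filePath are illustrative values, and the plugin expects the referenced image to exist on disk.

import remark from 'remark';
import mdx from 'remark-mdx';
import transformImage from './index';

const processor = remark().use(mdx).use(transformImage, {
  staticDir: '/site/static', // assumed static assets directory
  filePath: '/site/docs/intro.md', // assumed markdown file being transformed
});
// After processing, an image such as ![Figure](./figure.png) is rewritten into a
// jsx node whose value is roughly:
//   <img alt={"Figure"} src={require("<inlineMarkdownImageFileLoader>./figure.png").default} />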
@@ -5,25 +5,34 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-const {
+import {
   toMessageRelativeFilePath,
   posixPath,
   escapePath,
-} = require('@docusaurus/utils');
-
-const visit = require('unist-util-visit');
-const path = require('path');
-const url = require('url');
-const fs = require('fs-extra');
-const escapeHtml = require('escape-html');
-const {toValue} = require('../utils');
-const {getFileLoaderUtils} = require('@docusaurus/core/lib/webpack/utils');
+} from '@docusaurus/utils';
+import visit from 'unist-util-visit';
+import path from 'path';
+import url from 'url';
+import fs from 'fs-extra';
+import escapeHtml from 'escape-html';
+import {stringifyContent} from '../utils';
+import {getFileLoaderUtils} from '@docusaurus/core/lib/webpack/utils';
+import type {Plugin, Transformer} from 'unified';
+import type {Link, Literal} from 'mdast';
 
 const {
   loaders: {inlineMarkdownLinkFileLoader},
 } = getFileLoaderUtils();
 
-async function ensureAssetFileExist(fileSystemAssetPath, sourceFilePath) {
+interface PluginOptions {
+  filePath: string;
+  staticDir: string;
+}
+
+async function ensureAssetFileExist(
+  fileSystemAssetPath: string,
+  sourceFilePath: string,
+) {
   const assetExists = await fs.pathExists(fileSystemAssetPath);
   if (!assetExists) {
     throw new Error(
@@ -35,7 +44,15 @@ async function ensureAssetFileExist(fileSystemAssetPath, sourceFilePath) {
 }
 
 // transform the link node to a jsx link with a require() call
-function toAssetRequireNode({node, filePath, requireAssetPath}) {
+function toAssetRequireNode({
+  node,
+  filePath,
+  requireAssetPath,
+}: {
+  node: Link;
+  filePath: string;
+  requireAssetPath: string;
+}) {
   /* eslint-disable no-param-reassign */
 
   let relativeRequireAssetPath = posixPath(
@@ -50,17 +67,21 @@ function toAssetRequireNode({node, filePath, requireAssetPath}) {
   const href = `require('${inlineMarkdownLinkFileLoader}${escapePath(
     relativeRequireAssetPath,
   )}').default`;
-  const children = (node.children || []).map((n) => toValue(n)).join('');
+  const children = stringifyContent(node);
   const title = node.title ? `title="${escapeHtml(node.title)}"` : '';
 
-  node.type = 'jsx';
-  node.value = `<a target="_blank" href={${href}}${title}>${children}</a>`;
+  ((node as unknown) as Literal).type = 'jsx';
+  ((node as unknown) as Literal).value = `<a target="_blank" href={${href}}${title}>${children}</a>`;
 }
 
 // If the link looks like an asset link, we'll link to the asset,
 // and use a require("assetUrl") (using webpack url-loader/file-loader)
 // instead of navigating to such link
-async function convertToAssetLinkIfNeeded({node, staticDir, filePath}) {
+async function convertToAssetLinkIfNeeded({
+  node,
+  staticDir,
+  filePath,
+}: {node: Link} & PluginOptions) {
   const assetPath = node.url;
 
   const hasSiteAlias = assetPath.startsWith('@site/');
@@ -73,7 +94,7 @@ async function convertToAssetLinkIfNeeded({node, staticDir, filePath}) {
     return;
   }
 
-  function toAssetLinkNode(requireAssetPath) {
+  function toAssetLinkNode(requireAssetPath: string) {
     toAssetRequireNode({
       node,
       filePath,
@@ -102,14 +123,16 @@ async function convertToAssetLinkIfNeeded({node, staticDir, filePath}) {
   }
 }
 
-async function processLinkNode({node, _index, _parent, filePath, staticDir}) {
+async function processLinkNode({
+  node,
+  filePath,
+  staticDir,
+}: {node: Link} & PluginOptions) {
   if (!node.url) {
     // try to improve error feedback
     // see https://github.com/facebook/docusaurus/issues/3309#issuecomment-690371675
-    const title =
-      node.title || (node.children[0] && node.children[0].value) || '?';
-    const line =
-      (node.position && node.position.start && node.position.start.line) || '?';
+    const title = node.title || (node.children[0] as Literal)?.value || '?';
+    const line = node?.position?.start?.line || '?';
     throw new Error(
       `Markdown link URL is mandatory in "${toMessageRelativeFilePath(
         filePath,
@@ -122,22 +145,18 @@ async function processLinkNode({node, _index, _parent, filePath, staticDir}) {
     return;
   }
 
-  await convertToAssetLinkIfNeeded({
-    node,
-    staticDir,
-    filePath,
-  });
+  await convertToAssetLinkIfNeeded({node, staticDir, filePath});
 }
 
-const plugin = (options) => {
-  const transformer = async (root) => {
-    const promises = [];
-    visit(root, 'link', (node, index, parent) => {
-      promises.push(processLinkNode({node, index, parent, ...options}));
+const plugin: Plugin<[PluginOptions]> = (options) => {
+  const transformer: Transformer = async (root) => {
+    const promises: Promise<void>[] = [];
+    visit(root, 'link', (node: Link) => {
+      promises.push(processLinkNode({node, ...options}));
     });
     await Promise.all(promises);
   };
   return transformer;
 };
 
-module.exports = plugin;
+export default plugin;
@@ -5,24 +5,26 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-const visit = require('unist-util-visit');
+import visit from 'unist-util-visit';
+import type {Transformer, Processor} from 'unified';
+import type {Code, Parent} from 'mdast';
 
 // This plugin is mostly to help integrating Docusaurus with translation systems
 // that do not support well MDX embedded JSX syntax (like Crowdin)
 // We wrap the JSX syntax in code blocks so that translation tools don't mess-up with the markup
 // But the JSX inside such code blocks should still be evaluated as JSX
 // See https://github.com/facebook/docusaurus/pull/4278
-function plugin() {
-  const transformer = (root) => {
-    visit(root, 'code', (node, _index, parent) => {
+function plugin(this: Processor): Transformer {
+  const transformer: Transformer = (root) => {
+    visit(root, 'code', (node: Code, _index, parent) => {
       if (node.lang === 'mdx-code-block') {
-        const newChildrens = this.parse(node.value).children;
+        const newChildren = (this!.parse(node.value) as Parent).children;
 
         // Replace the mdx code block by its content, parsed
-        parent.children.splice(
-          parent.children.indexOf(node),
+        parent!.children.splice(
+          parent!.children.indexOf(node),
           1,
-          ...newChildrens,
+          ...newChildren,
         );
       }
     });
@@ -31,4 +33,4 @@ function plugin() {
   return transformer;
 }
 
-module.exports = plugin;
+export default plugin;
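A hedged sketch (not in the diff) of how this plugin is attached; because it calls this.parse, it must be registered through a unified/remark processor, which binds the processor as its receiver.

import remark from 'remark';
import mdx from 'remark-mdx';
import unwrapMdxCodeBlocks from './index';

// A fenced block whose language is "mdx-code-block" is re-parsed and its parsed
// children are spliced back into the parent in place of the code node, so markup
// that was hidden from translation tools still ends up evaluated as JSX.
const processor = remark().use(mdx).use(unwrapMdxCodeBlocks);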
@@ -1,39 +0,0 @@
-/**
- * Copyright (c) Facebook, Inc. and its affiliates.
- *
- * This source code is licensed under the MIT license found in the
- * LICENSE file in the root directory of this source tree.
- */
-
-const escapeHtml = require('escape-html');
-const toString = require('mdast-util-to-string');
-
-/**
- * @param {StringValuedNode | undefined} node
- * @returns {string}
- */
-function toValue(node) {
-  if (node && node.type) {
-    switch (node.type) {
-      case 'text':
-        return escapeHtml(node.value);
-      case 'heading':
-        return node.children.map(toValue).join('');
-      case 'inlineCode':
-        return `<code>${escapeHtml(node.value)}</code>`;
-      case 'emphasis':
-        return `<em>${node.children.map(toValue).join('')}</em>`;
-      case 'strong':
-        return `<strong>${node.children.map(toValue).join('')}</strong>`;
-      case 'delete':
-        return `<del>${node.children.map(toValue).join('')}</del>`;
-      default:
-    }
-  }
-
-  return toString(node);
-}
-
-module.exports = {
-  toValue,
-};
packages/docusaurus-mdx-loader/src/remark/utils/index.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import escapeHtml from 'escape-html';
+import toString from 'mdast-util-to-string';
+import type {Parent} from 'unist';
+import type {StaticPhrasingContent, Heading} from 'mdast';
+
+export function stringifyContent(node: Parent): string {
+  return ((node.children || []) as StaticPhrasingContent[])
+    .map(toValue)
+    .join('');
+}
+
+export function toValue(node: StaticPhrasingContent | Heading): string {
+  if (node && node.type) {
+    switch (node.type) {
+      case 'text':
+        return escapeHtml(node.value);
+      case 'heading':
+        return stringifyContent(node);
+      case 'inlineCode':
+        return `<code>${escapeHtml(node.value)}</code>`;
+      case 'emphasis':
+        return `<em>${stringifyContent(node)}</em>`;
+      case 'strong':
+        return `<strong>${stringifyContent(node)}</strong>`;
+      case 'delete':
+        return `<del>${stringifyContent(node)}</del>`;
+      default:
+    }
+  }
+
+  return toString(node);
+}
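A small illustrative example (not in the diff) of what toValue/stringifyContent produce for a heading node; the node literal below is hand-built for demonstration.

import type {Heading} from 'mdast';
import {toValue} from './index';

// An h2 made of a text node and an inlineCode node...
const heading: Heading = {
  type: 'heading',
  depth: 2,
  children: [
    {type: 'text', value: 'Install '},
    {type: 'inlineCode', value: 'docusaurus'},
  ],
};

// ...is flattened into the HTML-ish string stored on TOC entries:
// toValue(heading) === 'Install <code>docusaurus</code>'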
packages/docusaurus-mdx-loader/src/types.d.ts (vendored, new file, 38 lines)
@@ -0,0 +1,38 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+declare module '@docusaurus/mdx-loader' {
+  export interface RemarkAndRehypePluginOptions {
+    remarkPlugins: RemarkOrRehypePlugin[];
+    rehypePlugins: string[];
+    beforeDefaultRemarkPlugins: RemarkOrRehypePlugin[];
+    beforeDefaultRehypePlugins: RemarkOrRehypePlugin[];
+  }
+}
+
+// TODO Types provided by MDX 2.0 https://github.com/mdx-js/mdx/blob/main/packages/mdx/types/index.d.ts
+declare module '@mdx-js/mdx' {
+  import type {Plugin, Processor} from 'unified';
+
+  export namespace mdx {
+    interface Options {
+      filepath?: string;
+      skipExport?: boolean;
+      wrapExport?: string;
+      remarkPlugins?: Plugin[];
+      rehypePlugins?: Plugin[];
+    }
+
+    function sync(content: string, options?: Options): string;
+    function createMdxAstCompiler(options?: Options): Processor;
+    function createCompiler(options?: Options): Processor;
+  }
+  export default function mdx(
+    content: string,
+    options?: mdx.Options,
+  ): Promise<string>;
+}
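A brief sketch (not in the diff) that exercises the ambient '@mdx-js/mdx' declaration above; the plugin list is illustrative.

import mdx from '@mdx-js/mdx';
import emoji from 'remark-emoji';

// Compiles an MDX string to JSX source, passing remark plugins the same way the
// loader does; the returned string still needs to go through JSX transpilation.
async function compile(source: string): Promise<string> {
  return mdx(source, {remarkPlugins: [emoji], rehypePlugins: []});
}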
@@ -1,9 +1,10 @@
 {
   "extends": "../../tsconfig.json",
   "compilerOptions": {
-    "allowJs": true,
-    "noEmit": true,
-    "noImplicitAny": false
+    "incremental": true,
+    "tsBuildInfoFile": "./lib/.tsbuildinfo",
+    "rootDir": "src",
+    "outDir": "lib"
   },
   "include": ["src/"]
 }
@@ -13,6 +13,7 @@ import visit from 'unist-util-visit';
 import remarkStringify from 'remark-stringify';
 import htmlTags from 'html-tags';
 import toText from 'hast-util-to-string';
+import type {Code, InlineCode} from 'mdast';
 
 const tags = htmlTags.reduce((acc: {[key: string]: boolean}, tag) => {
   acc[tag] = true;
@@ -21,10 +22,10 @@ const tags = htmlTags.reduce((acc: {[key: string]: boolean}, tag) => {
 
 export default function sanitizeMD(code: string): string {
   const markdownTree = unified().use(markdown).parse(code);
-  visit(markdownTree, 'code', (node) => {
+  visit(markdownTree, 'code', (node: Code) => {
     node.value = `\n<!--${node.value}-->\n`;
   });
-  visit(markdownTree, 'inlineCode', (node) => {
+  visit(markdownTree, 'inlineCode', (node: InlineCode) => {
     node.value = `<!--${node.value}-->`;
   });
 
@@ -33,7 +34,7 @@ export default function sanitizeMD(code: string): string {
     .stringify(markdownTree);
 
   const htmlTree = unified().use(parse).parse(markdownString);
-  visit(htmlTree, 'element', (node) => {
+  visit(htmlTree, 'element', (node: any) => {
     if (!tags[node.tagName as string]) {
       node.type = 'text';
       node.value = node.tagName + toText(node);
@@ -42,7 +42,9 @@ export default function htmlTagObjectToString(tagDefinition: unknown): string {
     if (tagAttributes[attributeName] === true) {
       return attributeName;
     }
-    return `${attributeName}="${escapeHTML(tagAttributes[attributeName])}"`;
+    return `${attributeName}="${escapeHTML(
+      tagAttributes[attributeName] as string,
+    )}"`;
   });
   return `<${[tagDefinition.tagName].concat(attributes).join(' ')}>${
     (!isVoidTag && tagDefinition.innerHTML) || ''
yarn.lock (26 lines changed)
@@ -3994,6 +3994,11 @@
     "@types/got" "^8"
     "@types/node" "*"
 
+"@types/escape-html@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@types/escape-html/-/escape-html-1.0.1.tgz#b19b4646915f0ae2c306bf984dc0a59c5cfc97ba"
+  integrity sha512-4mI1FuUUZiuT95fSVqvZxp/ssQK9zsa86S43h9x3zPOSU9BBJ+BfDkXwuaU7BfsD+e7U0/cUUfJFk3iW2M4okA==
+
 "@types/eslint-scope@^3.7.0":
   version "3.7.0"
   resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.0.tgz#4792816e31119ebd506902a482caec4951fabd86"
@@ -4208,10 +4213,10 @@
   resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.170.tgz#0d67711d4bf7f4ca5147e9091b847479b87925d6"
   integrity sha512-bpcvu/MKHHeYX+qeEN8GE7DIravODWdACVA1ctevD8CN24RhPZIKMn9ntfAsrvLfSX3cR5RrBKAbYm9bGs0A+Q==
 
-"@types/mdast@^3.0.0":
-  version "3.0.3"
-  resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.3.tgz#2d7d671b1cd1ea3deb306ea75036c2a0407d2deb"
-  integrity sha512-SXPBMnFVQg1s00dlMCc/jCdvPqdE4mXaMMCeRlxLDmTAEoegHT53xKtkDnzDTOcmMHUfcjyf36/YYZ6SxRdnsw==
+"@types/mdast@^3.0.0", "@types/mdast@^3.0.7":
+  version "3.0.7"
+  resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.7.tgz#cba63d0cc11eb1605cea5c0ad76e02684394166b"
+  integrity sha512-YwR7OK8aPmaBvMMUi+pZXBNoW2unbVbfok4YRqGMJBe1dpDlzpRkJrYEYmvjxgs5JhuQmKfDexrN98u941Zasg==
   dependencies:
     "@types/unist" "*"
 
@@ -4462,6 +4467,11 @@
   resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.0.tgz#7036640b4e21cc2f259ae826ce843d277dad8cff"
   integrity sha512-RJJrrySY7A8havqpGObOB4W92QXKJo63/jFLLgpvOtsGUqbQZ9Sbgl35KMm1DjC6j7AvmmU2bIno+3IyEaemaw==
 
+"@types/stringify-object@^3.3.1":
+  version "3.3.1"
+  resolved "https://registry.yarnpkg.com/@types/stringify-object/-/stringify-object-3.3.1.tgz#9ee394931e63468de0412a8e19c9f021a7d1d24d"
+  integrity sha512-bpCBW0O+QrMLNFBY/+rkZtGzcYRmc2aTD8qYHOMNUmednqETfEZtFcGEA11l9xqbIeiT1PgXG0eq3zqayVzZSQ==
+
 "@types/tapable@^1":
   version "1.0.7"
   resolved "https://registry.yarnpkg.com/@types/tapable/-/tapable-1.0.7.tgz#545158342f949e8fd3bfd813224971ecddc3fac4"
@@ -4474,10 +4484,10 @@
   dependencies:
     source-map "^0.6.1"
 
-"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3":
-  version "2.0.3"
-  resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e"
-  integrity sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ==
+"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3", "@types/unist@^2.0.6":
+  version "2.0.6"
+  resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.6.tgz#250a7b16c3b91f672a24552ec64678eeb1d3a08d"
+  integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==
 
 "@types/wait-on@^5.2.0":
   version "5.3.0"