Mirror of https://github.com/facebook/docusaurus.git (synced 2025-05-10 15:47:23 +02:00)
fix(v2): move metadata export after compiling MDX to avoid weird MDX parsing error. (#2105)
* fix(v2): move metadata export to mdx-loader to prevent any weird mdx parsing
* refactor
* nits
* nits
* nits
parent 1f0eb37e19
commit ace93c5a14
7 changed files with 64 additions and 65 deletions
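In short, an illustrative sketch of the two strategies (not the literal diff; `rawMdx` and `metadataJson` are placeholder names, and the calls run inside an async loader). Previously the metadata export was appended to the raw Markdown before compilation, so the MDX parser had to parse the injected statement and could misread it as document content; now the export is emitted into the already-compiled module, where it is plain JavaScript:

    const mdx = require('@mdx-js/mdx');

    // Before: inject into the raw source, then compile; MDX must parse the export.
    const before = await mdx(rawMdx + '\n\n' + `export const metadata = ${metadataJson};`);

    // After: compile first, then prepend the export to the compiled JSX module.
    const after = `export const metadata = ${metadataJson};\n${await mdx(rawMdx)}`;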
packages/docusaurus-mdx-loader/README.md

@@ -40,3 +40,6 @@ Array of rehype plugins to manipulate the MDXHAST
 ### `remarkPlugins`
 
 Array of remark plugins to manipulate the MDXAST
 
+### `metadataPath`
+
+A function to provide the metadataPath depending on current loaded MDX path that will be exported as the MDX metadata.
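A minimal sketch of how a consumer might set this option, modeled on the plugin wiring later in this diff; the rule shape and the `dataDir`/`siteDir` bindings are illustrative assumptions, not part of the commit:

    const path = require('path');
    const {docuHash, aliasedSitePath} = require('@docusaurus/utils');

    // webpack rule (sketch): point mdx-loader at the JSON metadata for each file.
    {
      test: /\.mdx?$/,
      use: [
        {
          loader: require.resolve('@docusaurus/mdx-loader'),
          options: {
            remarkPlugins: [],
            rehypePlugins: [],
            // Resolve each loaded MDX file to the JSON file holding its metadata.
            metadataPath: (mdxPath) =>
              path.join(dataDir, `${docuHash(aliasedSitePath(mdxPath, siteDir))}.json`),
          },
        },
      ],
    }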
packages/docusaurus-mdx-loader/package.json

@@ -13,6 +13,7 @@
     "@mdx-js/mdx": "^1.5.1",
     "@mdx-js/react": "^1.5.1",
     "escape-html": "^1.0.3",
+    "fs-extra": "^8.1.0",
     "github-slugger": "^1.2.1",
     "gray-matter": "^4.0.2",
     "loader-utils": "^1.2.3",
packages/docusaurus-mdx-loader/src/index.js

@@ -6,6 +6,7 @@
  */
 
 const {getOptions} = require('loader-utils');
+const {readFile} = require('fs-extra');
 const mdx = require('@mdx-js/mdx');
 const emoji = require('remark-emoji');
 const slug = require('remark-slug');
@@ -43,11 +44,25 @@ module.exports = async function(fileString) {
     return callback(err);
   }
 
+  let exportStr = `export const frontMatter = ${stringifyObject(data)};`;
+
+  // Read metadata for this MDX and export it
+  if (options.metadataPath && typeof options.metadataPath === 'function') {
+    const metadataPath = options.metadataPath(this.resourcePath);
+
+    if (metadataPath) {
+      // Add as dependency of this loader result so that we can recompile if metadata is changed
+      this.addDependency(metadataPath);
+      const metadata = await readFile(metadataPath, 'utf8');
+      exportStr += `\nexport const metadata = ${metadata};`;
+    }
+  }
+
   const code = `
 import React from 'react';
 import { mdx } from '@mdx-js/react';
 
-export const frontMatter = ${stringifyObject(data)};
+${exportStr}
 ${result}
 `;
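For reference, the module this loader now emits looks roughly like the following, assembled from the template above; the `frontMatter` and `metadata` values are placeholders:

    import React from 'react';
    import { mdx } from '@mdx-js/react';

    export const frontMatter = {title: 'Hello'};        // parsed by gray-matter
    export const metadata = {permalink: '/blog/hello'}; // verbatim contents of the JSON metadata file
    // ...compiled MDX output follows (the default-exported MDXContent component)...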
packages/docusaurus-plugin-content-blog/src/index.ts

@@ -8,7 +8,7 @@
 import fs from 'fs-extra';
 import _ from 'lodash';
 import path from 'path';
-import {normalizeUrl, docuHash} from '@docusaurus/utils';
+import {normalizeUrl, docuHash, aliasedSitePath} from '@docusaurus/utils';
 
 import {
   PluginOptions,
@@ -69,9 +69,10 @@ export default function pluginContentBlog(
   opts: Partial<PluginOptions>,
 ): Plugin<BlogContent | null> {
   const options: PluginOptions = {...DEFAULT_OPTIONS, ...opts};
-  const contentPath = path.resolve(context.siteDir, options.path);
+  const {siteDir, generatedFilesDir} = context;
+  const contentPath = path.resolve(siteDir, options.path);
   const dataDir = path.join(
-    context.generatedFilesDir,
+    generatedFilesDir,
     'docusaurus-plugin-content-blog',
   );
 
@@ -231,7 +232,7 @@ export default function pluginContentBlog(
       blogPosts.map(async blogPost => {
         const {id, metadata} = blogPost;
         await createData(
-          // Note that this created data path must be in sync with markdownLoader.ts metadataPath
+          // Note that this created data path must be in sync with metadataPath provided to mdx-loader
          `${docuHash(metadata.source)}.json`,
           JSON.stringify(metadata, null, 2),
         );
@@ -373,13 +374,19 @@ export default function pluginContentBlog(
             options: {
               remarkPlugins,
               rehypePlugins,
+              // Note that metadataPath must be the same/ in-sync as the path from createData for each MDX
+              metadataPath: (mdxPath: string) => {
+                const aliasedSource = aliasedSitePath(mdxPath, siteDir);
+                return path.join(
+                  dataDir,
+                  `${docuHash(aliasedSource)}.json`,
+                );
+              },
             },
           },
           {
             loader: path.resolve(__dirname, './markdownLoader.js'),
             options: {
-              dataDir,
-              siteDir: context.siteDir,
               truncateMarker,
             },
           },
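The "in sync" comments are the crux of the change: the plugin writes each item's metadata under a docuHash-derived file name, and the `metadataPath` callback must recompute exactly that name. A sketch of the invariant, assuming `metadata.source` is already the aliased path (for example '@site/blog/2019-12-14-post.md'):

    // Writer (contentLoaded): file name derived from the aliased source path.
    await createData(`${docuHash(metadata.source)}.json`, JSON.stringify(metadata, null, 2));

    // Reader (mdx-loader option): the absolute path is aliased first, so both
    // sides hash the same string and resolve to the same file under dataDir.
    metadataPath: (mdxPath) =>
      path.join(dataDir, `${docuHash(aliasedSitePath(mdxPath, siteDir))}.json`);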
packages/docusaurus-plugin-content-blog/src/markdownLoader.ts

@@ -8,14 +8,11 @@
 const {parseQuery, getOptions} = require('loader-utils');
 import {loader} from 'webpack';
 import {truncate} from './blogUtils';
-import path from 'path';
-import {readFile} from 'fs-extra';
-import {aliasedSitePath, docuHash} from '@docusaurus/utils';
 
 export = function(fileString: string) {
   const callback = this.async();
 
-  const {truncateMarker, siteDir, dataDir} = getOptions(this);
+  const {truncateMarker}: {truncateMarker: RegExp | string} = getOptions(this);
 
   let finalContent = fileString;
 
@@ -24,20 +21,5 @@ export = function(fileString: string) {
   if (truncated) {
     finalContent = truncate(fileString, truncateMarker);
   }
-
-  // Read metadata & then embed it to this markdown content
-  // Note that metadataPath must be the same/ in-sync as the path from createData
-  const aliasedSource = aliasedSitePath(this.resourcePath, siteDir);
-  const metadataPath = path.join(dataDir, `${docuHash(aliasedSource)}.json`);
-
-  // Add metadataPath as dependency of this loader result so that we can recompile if metadata is changed
-  this.addDependency(metadataPath);
-
-  readFile(metadataPath, 'utf8', function(err, metadata) {
-    if (err) return callback && callback(err);
-
-    const metadataStr = `export const metadata = ${metadata};`;
-    // We need to add two lines break so that mdx won't mistake it as part of previous paragraph
-    callback && callback(null, finalContent + '\n\n' + metadataStr);
-  });
+  return callback && callback(null, finalContent);
 } as loader.Loader;
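For context on the logic that remains: the `truncated` flag is not a loader option; it comes from the resource query (hence the `parseQuery` import), so the same post can be compiled twice, once in full and once as a preview. A hedged sketch, since the extraction line sits outside these hunks:

    // e.g. the preview variant of a post is requested as './post.md?truncated=true'
    const {truncated} = this.resourceQuery ? parseQuery(this.resourceQuery) : {};
    if (truncated) {
      finalContent = truncate(fileString, truncateMarker);
    }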
packages/docusaurus-plugin-content-docs/src/index.ts

@@ -9,7 +9,12 @@ import _ from 'lodash';
 import globby from 'globby';
 import fs from 'fs-extra';
 import path from 'path';
-import {normalizeUrl, docuHash, objectWithKeySorted} from '@docusaurus/utils';
+import {
+  normalizeUrl,
+  docuHash,
+  objectWithKeySorted,
+  aliasedSitePath,
+} from '@docusaurus/utils';
 import {LoadContext, Plugin, RouteConfig} from '@docusaurus/types';
 
 import createOrder from './order';
@@ -285,7 +290,7 @@ export default function pluginContentDocs(
     const routes = await Promise.all(
       metadataItems.map(async metadataItem => {
         await createData(
-          // Note that this created data path must be in sync with markdown/index.ts metadataPath
+          // Note that this created data path must be in sync with metadataPath provided to mdx-loader
           `${docuHash(metadataItem.source)}.json`,
           JSON.stringify(metadataItem, null, 2),
         );
@@ -404,13 +409,20 @@ export default function pluginContentDocs(
             options: {
               remarkPlugins,
               rehypePlugins,
+              metadataPath: (mdxPath: string) => {
+                // Note that metadataPath must be the same/ in-sync as the path from createData for each MDX
+                const aliasedSource = aliasedSitePath(mdxPath, siteDir);
+                return path.join(
+                  dataDir,
+                  `${docuHash(aliasedSource)}.json`,
+                );
+              },
             },
           },
          {
             loader: path.resolve(__dirname, './markdown/index.js'),
             options: {
               siteDir,
-              dataDir,
               docsDir,
               sourceToPermalink: sourceToPermalink,
               versionedDir,
packages/docusaurus-plugin-content-docs/src/markdown/index.ts

@@ -5,46 +5,25 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-import path from 'path';
-import {readFile} from 'fs-extra';
 import {getOptions} from 'loader-utils';
 import {loader} from 'webpack';
 import linkify from './linkify';
-import {docuHash, aliasedSitePath} from '@docusaurus/utils';
 
 export = function(fileString: string) {
   const callback = this.async();
-  const {
-    dataDir,
-    docsDir,
-    siteDir,
-    versionedDir,
-    sourceToPermalink,
-  } = getOptions(this);
-
-  // Replace all markdown linking to correct url
-  const linkifiedStr = linkify(
+  const {docsDir, siteDir, versionedDir, sourceToPermalink} = getOptions(this);
+  return (
+    callback &&
+    callback(
+      null,
+      linkify(
        fileString,
        this.resourcePath,
        docsDir,
        siteDir,
        sourceToPermalink,
        versionedDir,
+      ),
+    )
   );
-
-  // Read metadata & then embed it to this markdown content
-  // Note that metadataPath must be the same/ in-sync as the path from createData
-  const aliasedSource = aliasedSitePath(this.resourcePath, siteDir);
-  const metadataPath = path.join(dataDir, `${docuHash(aliasedSource)}.json`);
-
-  // Add metadataPath as dependency of this loader result so that we can recompile if metadata is changed
-  this.addDependency(metadataPath);
-
-  readFile(metadataPath, 'utf8', function(err, metadata) {
-    if (err) return callback && callback(err);
-
-    const metadataStr = `export const metadata = ${metadata}`;
-    // We need to add two lines break so that mdx won't mistake it as part of previous paragraph
-    callback && callback(null, linkifiedStr + '\n\n' + metadataStr);
-  });
 } as loader.Loader;
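With the metadata branch gone, this loader reduces to a pass through `linkify`, which rewrites relative Markdown links to their final permalinks. An assumed example of the rewrite, using a hypothetical `sourceToPermalink` entry:

    // sourceToPermalink (assumed shape): { '@site/docs/intro.md': '/docs/intro' }
    // linkify then rewrites relative links inside the file being loaded:
    //   [Getting started](./intro.md)   ->   [Getting started](/docs/intro)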