diff --git a/packages/docusaurus-mdx-loader/README.md b/packages/docusaurus-mdx-loader/README.md
index 97784c01ba..b3a4dc62ff 100644
--- a/packages/docusaurus-mdx-loader/README.md
+++ b/packages/docusaurus-mdx-loader/README.md
@@ -40,3 +40,6 @@ Array of rehype plugins to manipulate the MDXHAST
 ### `remarkPlugins`
 
 Array of remark plugins to manipulate the MDXAST
+
+### `metadataPath`
+A function that receives the path of the MDX file currently being loaded and returns the path to a metadata file whose contents will be exported as the MDX metadata.
diff --git a/packages/docusaurus-mdx-loader/package.json b/packages/docusaurus-mdx-loader/package.json
index 252e2be3d7..69e9c08e73 100644
--- a/packages/docusaurus-mdx-loader/package.json
+++ b/packages/docusaurus-mdx-loader/package.json
@@ -13,6 +13,7 @@
     "@mdx-js/mdx": "^1.5.1",
     "@mdx-js/react": "^1.5.1",
     "escape-html": "^1.0.3",
+    "fs-extra": "^8.1.0",
     "github-slugger": "^1.2.1",
     "gray-matter": "^4.0.2",
     "loader-utils": "^1.2.3",
diff --git a/packages/docusaurus-mdx-loader/src/index.js b/packages/docusaurus-mdx-loader/src/index.js
index c276e52630..50ad93ce9b 100644
--- a/packages/docusaurus-mdx-loader/src/index.js
+++ b/packages/docusaurus-mdx-loader/src/index.js
@@ -6,6 +6,7 @@
  */
 
 const {getOptions} = require('loader-utils');
+const {readFile} = require('fs-extra');
 const mdx = require('@mdx-js/mdx');
 const emoji = require('remark-emoji');
 const slug = require('remark-slug');
@@ -43,11 +44,25 @@ module.exports = async function(fileString) {
     return callback(err);
   }
 
+  let exportStr = `export const frontMatter = ${stringifyObject(data)};`;
+
+  // Read metadata for this MDX and export it
+  if (options.metadataPath && typeof options.metadataPath === 'function') {
+    const metadataPath = options.metadataPath(this.resourcePath);
+
+    if (metadataPath) {
+      // Add as dependency of this loader result so that we can recompile if metadata is changed
+      this.addDependency(metadataPath);
+      const metadata = await readFile(metadataPath, 'utf8');
+      exportStr += `\nexport const metadata = ${metadata};`;
+    }
+  }
+
   const code = `
   import React from 'react';
   import { mdx } from '@mdx-js/react';
 
-  export const frontMatter = ${stringifyObject(data)};
+  ${exportStr}
   ${result}
   `;
 
diff --git a/packages/docusaurus-plugin-content-blog/src/index.ts b/packages/docusaurus-plugin-content-blog/src/index.ts
index fab06349ed..8fae5da393 100644
--- a/packages/docusaurus-plugin-content-blog/src/index.ts
+++ b/packages/docusaurus-plugin-content-blog/src/index.ts
@@ -8,7 +8,7 @@
 import fs from 'fs-extra';
 import _ from 'lodash';
 import path from 'path';
-import {normalizeUrl, docuHash} from '@docusaurus/utils';
+import {normalizeUrl, docuHash, aliasedSitePath} from '@docusaurus/utils';
 
 import {
   PluginOptions,
@@ -69,9 +69,10 @@ export default function pluginContentBlog(
   opts: Partial<PluginOptions>,
 ): Plugin<BlogContent | null> {
   const options: PluginOptions = {...DEFAULT_OPTIONS, ...opts};
-  const contentPath = path.resolve(context.siteDir, options.path);
+  const {siteDir, generatedFilesDir} = context;
+  const contentPath = path.resolve(siteDir, options.path);
   const dataDir = path.join(
-    context.generatedFilesDir,
+    generatedFilesDir,
     'docusaurus-plugin-content-blog',
   );
 
@@ -231,7 +232,7 @@ export default function pluginContentBlog(
         blogPosts.map(async blogPost => {
           const {id, metadata} = blogPost;
           await createData(
-            // Note that this created data path must be in sync with markdownLoader.ts metadataPath
+            // Note that this created data path must be in sync with the metadataPath provided to the mdx-loader
             `${docuHash(metadata.source)}.json`,
             JSON.stringify(metadata, null, 2),
           );
@@ -373,13 +374,19 @@ export default function pluginContentBlog(
                   options: {
                     remarkPlugins,
                     rehypePlugins,
+                    // Note that metadataPath must stay in sync with the path created via createData for each MDX file
+                    metadataPath: (mdxPath: string) => {
+                      const aliasedSource = aliasedSitePath(mdxPath, siteDir);
+                      return path.join(
+                        dataDir,
+                        `${docuHash(aliasedSource)}.json`,
+                      );
+                    },
                   },
                 },
                 {
                   loader: path.resolve(__dirname, './markdownLoader.js'),
                   options: {
-                    dataDir,
-                    siteDir: context.siteDir,
                     truncateMarker,
                   },
                 },
diff --git a/packages/docusaurus-plugin-content-blog/src/markdownLoader.ts b/packages/docusaurus-plugin-content-blog/src/markdownLoader.ts
index a94a6ff22c..97798d473b 100644
--- a/packages/docusaurus-plugin-content-blog/src/markdownLoader.ts
+++ b/packages/docusaurus-plugin-content-blog/src/markdownLoader.ts
@@ -8,14 +8,11 @@
 const {parseQuery, getOptions} = require('loader-utils');
 import {loader} from 'webpack';
 import {truncate} from './blogUtils';
-import path from 'path';
-import {readFile} from 'fs-extra';
-import {aliasedSitePath, docuHash} from '@docusaurus/utils';
 
 export = function(fileString: string) {
   const callback = this.async();
 
-  const {truncateMarker, siteDir, dataDir} = getOptions(this);
+  const {truncateMarker}: {truncateMarker: RegExp | string} = getOptions(this);
 
   let finalContent = fileString;
 
@@ -24,20 +21,5 @@
   if (truncated) {
     finalContent = truncate(fileString, truncateMarker);
   }
-
-  // Read metadata & then embed it to this markdown content
-  // Note that metadataPath must be the same/ in-sync as the path from createData
-  const aliasedSource = aliasedSitePath(this.resourcePath, siteDir);
-  const metadataPath = path.join(dataDir, `${docuHash(aliasedSource)}.json`);
-
-  // Add metadataPath as dependency of this loader result so that we can recompile if metadata is changed
-  this.addDependency(metadataPath);
-
-  readFile(metadataPath, 'utf8', function(err, metadata) {
-    if (err) return callback && callback(err);
-
-    const metadataStr = `export const metadata = ${metadata};`;
-    // We need to add two lines break so that mdx won't mistake it as part of previous paragraph
-    callback && callback(null, finalContent + '\n\n' + metadataStr);
-  });
+  return callback && callback(null, finalContent);
 } as loader.Loader;
diff --git a/packages/docusaurus-plugin-content-docs/src/index.ts b/packages/docusaurus-plugin-content-docs/src/index.ts
index 2a96ab2d36..d5185d1238 100644
--- a/packages/docusaurus-plugin-content-docs/src/index.ts
+++ b/packages/docusaurus-plugin-content-docs/src/index.ts
@@ -9,7 +9,12 @@ import _ from 'lodash';
 import globby from 'globby';
 import fs from 'fs-extra';
 import path from 'path';
-import {normalizeUrl, docuHash, objectWithKeySorted} from '@docusaurus/utils';
+import {
+  normalizeUrl,
+  docuHash,
+  objectWithKeySorted,
+  aliasedSitePath,
+} from '@docusaurus/utils';
 import {LoadContext, Plugin, RouteConfig} from '@docusaurus/types';
 
 import createOrder from './order';
@@ -285,7 +290,7 @@ export default function pluginContentDocs(
       const routes = await Promise.all(
         metadataItems.map(async metadataItem => {
           await createData(
-            // Note that this created data path must be in sync with markdown/index.ts metadataPath
+            // Note that this created data path must be in sync with the metadataPath provided to the mdx-loader
             `${docuHash(metadataItem.source)}.json`,
             JSON.stringify(metadataItem, null, 2),
           );
@@ -404,13 +409,20 @@ export default function pluginContentDocs(
                   options: {
                     remarkPlugins,
                     rehypePlugins,
+                    metadataPath: (mdxPath: string) => {
+                      // Note that metadataPath must stay in sync with the path created via createData for each MDX file
+                      const aliasedSource = aliasedSitePath(mdxPath, siteDir);
+                      return path.join(
+                        dataDir,
+                        `${docuHash(aliasedSource)}.json`,
+                      );
+                    },
                   },
                 },
                 {
                   loader: path.resolve(__dirname, './markdown/index.js'),
                   options: {
                     siteDir,
-                    dataDir,
                     docsDir,
                     sourceToPermalink: sourceToPermalink,
                     versionedDir,
diff --git a/packages/docusaurus-plugin-content-docs/src/markdown/index.ts b/packages/docusaurus-plugin-content-docs/src/markdown/index.ts
index efb9740067..6f842ef0d2 100644
--- a/packages/docusaurus-plugin-content-docs/src/markdown/index.ts
+++ b/packages/docusaurus-plugin-content-docs/src/markdown/index.ts
@@ -5,46 +5,25 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-import path from 'path';
-import {readFile} from 'fs-extra';
 import {getOptions} from 'loader-utils';
 import {loader} from 'webpack';
 import linkify from './linkify';
-import {docuHash, aliasedSitePath} from '@docusaurus/utils';
 
 export = function(fileString: string) {
   const callback = this.async();
-  const {
-    dataDir,
-    docsDir,
-    siteDir,
-    versionedDir,
-    sourceToPermalink,
-  } = getOptions(this);
-
-  // Replace all markdown linking to correct url
-  const linkifiedStr = linkify(
-    fileString,
-    this.resourcePath,
-    docsDir,
-    siteDir,
-    sourceToPermalink,
-    versionedDir,
+  const {docsDir, siteDir, versionedDir, sourceToPermalink} = getOptions(this);
+  return (
+    callback &&
+    callback(
+      null,
+      linkify(
+        fileString,
+        this.resourcePath,
+        docsDir,
+        siteDir,
+        sourceToPermalink,
+        versionedDir,
+      ),
+    )
   );
-
-  // Read metadata & then embed it to this markdown content
-  // Note that metadataPath must be the same/ in-sync as the path from createData
-  const aliasedSource = aliasedSitePath(this.resourcePath, siteDir);
-  const metadataPath = path.join(dataDir, `${docuHash(aliasedSource)}.json`);
-
-  // Add metadataPath as dependency of this loader result so that we can recompile if metadata is changed
-  this.addDependency(metadataPath);
-
-  readFile(metadataPath, 'utf8', function(err, metadata) {
-    if (err) return callback && callback(err);
-
-    const metadataStr = `export const metadata = ${metadata}`;
-    // We need to add two lines break so that mdx won't mistake it as part of previous paragraph
-    callback && callback(null, linkifiedStr + '\n\n' + metadataStr);
-  });
 } as loader.Loader;
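For reference, here is a minimal sketch of how a plugin wires the new `metadataPath` option so that it stays in sync with the JSON files it writes through `createData`. It mirrors the blog and docs changes above; the `mdxLoaderRule` helper and the rule skeleton around it are illustrative and not part of this diff.

```ts
import path from 'path';
import {docuHash, aliasedSitePath} from '@docusaurus/utils';

// Hypothetical helper: builds the webpack rule a plugin could return from its
// configureWebpack hook for MDX content. Only the metadataPath wiring is the
// point here; the surrounding rule shape is a plausible skeleton.
function mdxLoaderRule(siteDir: string, dataDir: string) {
  return {
    test: /\.mdx?$/,
    use: [
      {
        loader: '@docusaurus/mdx-loader',
        options: {
          // For each MDX file, return the path of the JSON metadata file the
          // plugin previously wrote with createData; the mdx-loader reads it
          // and re-exports it as `export const metadata`.
          metadataPath: (mdxPath: string) => {
            const aliasedSource = aliasedSitePath(mdxPath, siteDir);
            return path.join(dataDir, `${docuHash(aliasedSource)}.json`);
          },
        },
      },
    ],
  };
}
```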
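The mdx-loader options shape implied by this change can be written out as a TypeScript interface. This is inferred from the diff; the package does not publish such a type.

```ts
// Inferred options contract for @docusaurus/mdx-loader after this change.
interface MDXLoaderOptions {
  remarkPlugins?: unknown[];
  rehypePlugins?: unknown[];
  /**
   * Given the absolute path of the MDX file being loaded, return the path to
   * a JSON file whose contents will be exported from the compiled module as
   * `metadata`. Returning a falsy value skips the metadata export.
   */
  metadataPath?: (mdxPath: string) => string | undefined;
}
```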