Mirror of https://github.com/facebook/docusaurus.git (synced 2025-06-18 02:32:28 +02:00)
refactor(mdx-loader): read metadata from memory (loaded content) instead of fs (#10457)
* mdx loader shouldn't read metadata from file system but from memory
* comments
* refactor: apply lint autofix
* apply same for blog
* apply same for blog
* refactor: apply lint autofix
* apply same for pages
Parent: 2aef92cb9e
Commit: a4329d3388
8 changed files with 182 additions and 80 deletions
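The core idea of the refactor, per the commit message: instead of the mdx-loader reading each document's metadata back from the JSON files the plugin writes into the .docusaurus data directory, the plugin now hands the metadata it already holds in memory straight to the loader (the metadataPath file is still created and kept in sync with createData). A minimal sketch of that difference, with hypothetical helper names; this is not the actual loader code:

// Hypothetical sketch: how metadata resolution changes with this refactor.
import fs from 'fs-extra';

type MetadataLookup = {
  metadataPath: string; // JSON file written via createData()
  metadataContent?: object; // same data, passed in memory by the plugin
};

// Before: only a path was available, so the loader had to hit the fs.
async function readMetadataFromFs(metadataPath: string): Promise<object> {
  return fs.readJSON(metadataPath);
}

// After (assumed behavior): prefer the in-memory object, fall back to the
// fs read when it is absent.
async function resolveMetadata(lookup: MetadataLookup): Promise<object> {
  return lookup.metadataContent ?? readMetadataFromFs(lookup.metadataPath);
}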
@@ -0,0 +1,34 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import type {DocMetadata, LoadedContent} from '@docusaurus/plugin-content-docs';
+
+function indexDocsBySource(content: LoadedContent): Map<string, DocMetadata> {
+  const allDocs = content.loadedVersions.flatMap((v) => v.docs);
+  return new Map(allDocs.map((doc) => [doc.source, doc]));
+}
+
+// TODO this is bad, we should have a better way to do this (new lifecycle?)
+// The source to doc/permalink is a mutable map passed to the mdx loader
+// See https://github.com/facebook/docusaurus/pull/10457
+// See https://github.com/facebook/docusaurus/pull/10185
+export function createContentHelpers() {
+  const sourceToDoc = new Map<string, DocMetadata>();
+  const sourceToPermalink = new Map<string, string>();
+
+  // Mutable map update :/
+  function updateContent(content: LoadedContent): void {
+    sourceToDoc.clear();
+    sourceToPermalink.clear();
+    indexDocsBySource(content).forEach((value, key) => {
+      sourceToDoc.set(key, value);
+      sourceToPermalink.set(key, value.permalink);
+    });
+  }
+
+  return {updateContent, sourceToDoc, sourceToPermalink};
+}
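The hunk above adds the new contentHelpers module. A rough usage sketch of how its pieces fit together, assuming a loadedContent value obtained from the plugin's loadContent() (the variable name and lookup key below are illustrative):

import type {LoadedContent} from '@docusaurus/plugin-content-docs';
import {createContentHelpers} from './contentHelpers';

declare const loadedContent: LoadedContent; // assumed: result of loadContent()

const contentHelpers = createContentHelpers();

// Called from contentLoaded() on every (re)build: both maps are cleared
// and refilled from the freshly loaded content.
contentHelpers.updateContent(loadedContent);

// The maps are shared by reference with the mdx-loader options, so lookups
// always see the latest docs. Keys are aliased source paths (doc.source).
const doc = contentHelpers.sourceToDoc.get('@site/docs/intro.md');
const permalink = contentHelpers.sourceToPermalink.get('@site/docs/intro.md');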
@@ -19,7 +19,6 @@ import {
   createSlugger,
   resolveMarkdownLinkPathname,
   DEFAULT_PLUGIN_ID,
-  type SourceToPermalink,
   type TagsFile,
 } from '@docusaurus/utils';
 import {
@@ -54,6 +53,7 @@ import {
 import {createAllRoutes} from './routes';
 import {createSidebarsUtils} from './sidebars/utils';
 
+import {createContentHelpers} from './contentHelpers';
 import type {
   PluginOptions,
   DocMetadataBase,
@@ -66,29 +66,6 @@ import type {LoadContext, Plugin} from '@docusaurus/types';
 import type {DocFile, FullVersion} from './types';
 import type {RuleSetRule} from 'webpack';
 
-// TODO this is bad, we should have a better way to do this (new lifecycle?)
-// The source to permalink is currently a mutable map passed to the mdx loader
-// for link resolution
-// see https://github.com/facebook/docusaurus/pull/10185
-function createSourceToPermalinkHelper() {
-  const sourceToPermalink: SourceToPermalink = new Map();
-
-  function computeSourceToPermalink(content: LoadedContent): SourceToPermalink {
-    const allDocs = content.loadedVersions.flatMap((v) => v.docs);
-    return new Map(allDocs.map(({source, permalink}) => [source, permalink]));
-  }
-
-  // Mutable map update :/
-  function update(content: LoadedContent): void {
-    sourceToPermalink.clear();
-    computeSourceToPermalink(content).forEach((value, key) => {
-      sourceToPermalink.set(key, value);
-    });
-  }
-
-  return {get: () => sourceToPermalink, update};
-}
-
 export default async function pluginContentDocs(
   context: LoadContext,
   options: PluginOptions,
@@ -115,7 +92,7 @@ export default async function pluginContentDocs(
   // TODO env should be injected into all plugins
   const env = process.env.NODE_ENV as DocEnv;
 
-  const sourceToPermalinkHelper = createSourceToPermalinkHelper();
+  const contentHelpers = createContentHelpers();
 
   async function createDocsMDXLoaderRule(): Promise<RuleSetRule> {
     const {
@@ -146,7 +123,15 @@ export default async function pluginContentDocs(
           // Note that metadataPath must be the same/in-sync as
           // the path from createData for each MDX.
           const aliasedPath = aliasedSitePath(mdxPath, siteDir);
-          return path.join(dataDir, `${docuHash(aliasedPath)}.json`);
+          const metadataPath = path.join(
+            dataDir,
+            `${docuHash(aliasedPath)}.json`,
+          );
+          const metadataContent = contentHelpers.sourceToDoc.get(aliasedPath);
+          return {
+            metadataPath,
+            metadataContent,
+          };
         },
         // Assets allow to convert some relative images paths to
         // require(...) calls
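In the hunk above, the callback now returns both metadataPath and metadataContent. The sourceToDoc lookup key is the aliased site path, i.e. the same string stored as doc.source during loadContent() and hashed for the createData() file name. A small sketch of the two derivations, with illustrative siteDir, dataDir, and mdxPath values:

import path from 'path';
import {aliasedSitePath, docuHash} from '@docusaurus/utils';

const siteDir = '/repo/website'; // illustrative
const dataDir = '/repo/website/.docusaurus/docusaurus-plugin-content-docs/default'; // illustrative
const mdxPath = '/repo/website/docs/intro.md'; // illustrative

// e.g. '@site/docs/intro.md': the same value stored as doc.source, and
// therefore the key used for contentHelpers.sourceToDoc.get(aliasedPath)
const aliasedPath = aliasedSitePath(mdxPath, siteDir);

// Must stay in sync with the createData() file name for the same document
const metadataPath = path.join(dataDir, `${docuHash(aliasedPath)}.json`);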
@@ -161,7 +146,7 @@ export default async function pluginContentDocs(
             );
             const permalink = resolveMarkdownLinkPathname(linkPathname, {
               sourceFilePath,
-              sourceToPermalink: sourceToPermalinkHelper.get(),
+              sourceToPermalink: contentHelpers.sourceToPermalink,
               siteDir,
               contentPaths: version,
             });
@@ -335,7 +320,7 @@ export default async function pluginContentDocs(
     },
 
     async contentLoaded({content, actions}) {
-      sourceToPermalinkHelper.update(content);
+      contentHelpers.updateContent(content);
 
       const versions: FullVersion[] = content.loadedVersions.map(toFullVersion);
 
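Design note on the hunks above: the helper maps are created once when the plugin factory runs and are captured by the loader rule built in createDocsMDXLoaderRule(); contentLoaded() then mutates them in place on every build and reload. The TODO comments carried over from the old helper make the trade-off explicit: this works because the loader holds a reference to the same Map objects, but a dedicated plugin lifecycle would be a cleaner way to feed loaded content to loaders (see the two pull requests referenced in the new contentHelpers module).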