refactor(mdx-loader): read metadata from memory (loaded content) instead of fs (#10457)
* mdx loader shouldn't read metadata from file system but from memory
* comments
* refactor: apply lint autofix
* apply same for blog
* apply same for blog
* refactor: apply lint autofix
* apply same for pages
Parent: 2aef92cb9e
Commit: a4329d3388

8 changed files with 182 additions and 80 deletions
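The change in a nutshell: the per-document `.json` metadata file is still written via createData, but the loader no longer has to read it back from disk, because the plugin now hands over the already-loaded metadata object alongside the path. A minimal sketch of the idea, assuming hypothetical names (`MetadataOptions`, `loadMetadata` are illustrative, not the actual @docusaurus/mdx-loader API):

// Illustrative sketch only; names are hypothetical, not the real mdx-loader API.
import fs from 'fs-extra';

type MetadataOptions = {
  metadataPath: string; // path of the .json file emitted via createData()
  metadataContent?: {[key: string]: unknown}; // same metadata, already in memory
};

async function loadMetadata(options: MetadataOptions): Promise<unknown> {
  // Before this refactor: always read the JSON file back from the file system.
  // After: prefer the in-memory object handed over by the content plugin and
  // only fall back to the fs read when no loaded content is available.
  if (options.metadataContent !== undefined) {
    return options.metadataContent;
  }
  return fs.readJSON(options.metadataPath);
}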
contentHelpers.ts (new file in docusaurus-plugin-content-blog):

@@ -0,0 +1,35 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import type {BlogContent, BlogPost} from '@docusaurus/plugin-content-blog';
+
+function indexBlogPostsBySource(content: BlogContent): Map<string, BlogPost> {
+  return new Map(
+    content.blogPosts.map((blogPost) => [blogPost.metadata.source, blogPost]),
+  );
+}
+
+// TODO this is bad, we should have a better way to do this (new lifecycle?)
+// The source to blog/permalink is a mutable map passed to the mdx loader
+// See https://github.com/facebook/docusaurus/pull/10457
+// See https://github.com/facebook/docusaurus/pull/10185
+export function createContentHelpers() {
+  const sourceToBlogPost = new Map<string, BlogPost>();
+  const sourceToPermalink = new Map<string, string>();
+
+  // Mutable map update :/
+  function updateContent(content: BlogContent): void {
+    sourceToBlogPost.clear();
+    sourceToPermalink.clear();
+    indexBlogPostsBySource(content).forEach((value, key) => {
+      sourceToBlogPost.set(key, value);
+      sourceToPermalink.set(key, value.metadata.permalink);
+    });
+  }
+
+  return {updateContent, sourceToBlogPost, sourceToPermalink};
+}
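A brief usage sketch (commentary, not part of the diff) of the lifecycle these helpers are designed for, assuming the wiring shown in the index.ts hunks below; the lookup path string is a made-up example:

// Commentary sketch only; the path string is a made-up example.
import type {BlogContent} from '@docusaurus/plugin-content-blog';
import {createContentHelpers} from './contentHelpers';

// The maps are created once and captured by the webpack loader options.
const contentHelpers = createContentHelpers();

// On every (re)load the maps are cleared and refilled in place, so references
// captured earlier by the loader config keep seeing fresh data.
declare const content: BlogContent; // whatever loadContent() produced
contentHelpers.updateContent(content);

// Lookups are keyed by the aliased source path of each blog post.
const post = contentHelpers.sourceToBlogPost.get('@site/blog/my-post.mdx');
const permalink = contentHelpers.sourceToPermalink.get('@site/blog/my-post.mdx');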
index.ts (docusaurus-plugin-content-blog):

@@ -19,7 +19,6 @@ import {
   getDataFilePath,
   DEFAULT_PLUGIN_ID,
   resolveMarkdownLinkPathname,
-  type SourceToPermalink,
 } from '@docusaurus/utils';
 import {getTagsFilePathsToWatch} from '@docusaurus/utils-validation';
 import {
@@ -40,6 +39,7 @@ import {createBlogFeedFiles, createFeedHtmlHeadTags} from './feed';
 
 import {createAllRoutes} from './routes';
 import {checkAuthorsMapPermalinkCollisions, getAuthorsMap} from './authorsMap';
+import {createContentHelpers} from './contentHelpers';
 import type {BlogContentPaths, BlogMarkdownLoaderOptions} from './types';
 import type {LoadContext, Plugin} from '@docusaurus/types';
 import type {
@@ -55,33 +55,6 @@ import type {RuleSetRule, RuleSetUseItem} from 'webpack';
 
 const PluginName = 'docusaurus-plugin-content-blog';
 
-// TODO this is bad, we should have a better way to do this (new lifecycle?)
-// The source to permalink is currently a mutable map passed to the mdx loader
-// for link resolution
-// see https://github.com/facebook/docusaurus/pull/10185
-function createSourceToPermalinkHelper() {
-  const sourceToPermalink: SourceToPermalink = new Map();
-
-  function computeSourceToPermalink(content: BlogContent): SourceToPermalink {
-    return new Map(
-      content.blogPosts.map(({metadata: {source, permalink}}) => [
-        source,
-        permalink,
-      ]),
-    );
-  }
-
-  // Mutable map update :/
-  function update(content: BlogContent): void {
-    sourceToPermalink.clear();
-    computeSourceToPermalink(content).forEach((value, key) => {
-      sourceToPermalink.set(key, value);
-    });
-  }
-
-  return {get: () => sourceToPermalink, update};
-}
-
 export default async function pluginContentBlog(
   context: LoadContext,
   options: PluginOptions,
@@ -128,7 +101,7 @@ export default async function pluginContentBlog(
     contentPaths,
   });
 
-  const sourceToPermalinkHelper = createSourceToPermalinkHelper();
+  const contentHelpers = createContentHelpers();
 
   async function createBlogMDXLoaderRule(): Promise<RuleSetRule> {
     const {
@@ -162,7 +135,16 @@
       // Note that metadataPath must be the same/in-sync as
       // the path from createData for each MDX.
       const aliasedPath = aliasedSitePath(mdxPath, siteDir);
-      return path.join(dataDir, `${docuHash(aliasedPath)}.json`);
+      const metadataPath = path.join(
+        dataDir,
+        `${docuHash(aliasedPath)}.json`,
+      );
+      const metadataContent =
+        contentHelpers.sourceToBlogPost.get(aliasedPath)!.metadata;
+      return {
+        metadataPath,
+        metadataContent,
+      };
     },
     // For blog posts a title in markdown is always removed
     // Blog posts title are rendered separately
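Worth noting (commentary, not part of the diff): the in-memory lookup and the on-disk JSON stay in sync because both are keyed off the same aliased source path. A hedged sketch with made-up values; the exact docuHash output and data directory differ:

// Illustrative values only.
import path from 'path';
import {aliasedSitePath, docuHash} from '@docusaurus/utils';

const siteDir = '/home/user/website';
const dataDir = '/home/user/website/.docusaurus/docusaurus-plugin-content-blog/default';
const mdxPath = '/home/user/website/blog/2024-08-27-hello.mdx';

// aliasedSitePath() rewrites the absolute path into a site-relative alias.
const aliasedPath = aliasedSitePath(mdxPath, siteDir);
// -> '@site/blog/2024-08-27-hello.mdx'

// The alias is used both as the sourceToBlogPost key (in-memory metadata) and
// as the docuHash() input for the createData() JSON file (on-disk metadata).
const metadataPath = path.join(dataDir, `${docuHash(aliasedPath)}.json`);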
@@ -184,7 +166,7 @@
     resolveMarkdownLink: ({linkPathname, sourceFilePath}) => {
       const permalink = resolveMarkdownLinkPathname(linkPathname, {
         sourceFilePath,
-        sourceToPermalink: sourceToPermalinkHelper.get(),
+        sourceToPermalink: contentHelpers.sourceToPermalink,
         siteDir,
         contentPaths,
       });
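For context (commentary, not part of the diff): resolveMarkdownLinkPathname turns a file-relative markdown link into the target document's permalink by consulting the sourceToPermalink map, which is why passing the live map from contentHelpers is enough. A hedged sketch; the link and expected result are made up:

// Illustrative sketch; the link and expected result are made up.
import {resolveMarkdownLinkPathname} from '@docusaurus/utils';
import type {BlogContentPaths} from './types';

declare const contentHelpers: {sourceToPermalink: Map<string, string>};
declare const sourceFilePath: string; // path of the MDX file containing the link
declare const siteDir: string;
declare const contentPaths: BlogContentPaths;

// A relative link like './other-post.mdx' resolves to the final permalink of
// that post (e.g. '/blog/other-post') if its source is present in the map.
const permalink = resolveMarkdownLinkPathname('./other-post.mdx', {
  sourceFilePath,
  sourceToPermalink: contentHelpers.sourceToPermalink,
  siteDir,
  contentPaths,
});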
@@ -352,7 +334,7 @@
     },
 
     async contentLoaded({content, actions}) {
-      sourceToPermalinkHelper.update(content);
+      contentHelpers.updateContent(content);
 
       await createAllRoutes({
         baseUrl,