Mirror of https://github.com/facebook/docusaurus.git (synced 2025-06-18 10:42:31 +02:00)

Commit d5885c0c5d (parent db6c2af160)
refactor(mdx-loader): refactor mdx-loader, expose loader creation utils (#10450)

13 changed files with 494 additions and 413 deletions
packages/docusaurus-mdx-loader/src/createMDXLoader.ts (new file, 48 lines):

/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import {createProcessors} from './processor';
import type {Options} from './loader';
import type {RuleSetRule, RuleSetUseItem} from 'webpack';

async function enhancedOptions(options: Options): Promise<Options> {
  // Because Jest doesn't like ESM / createProcessors()
  if (process.env.NODE_ENV === 'test' || process.env.JEST_WORKER_ID) {
    return options;
  }

  // We create the processors eagerly here, to avoid lazy processor creation.
  // Lazy creation messes up Rsdoctor's ability to measure mdx-loader perf.
  const newOptions: Options = options.processors
    ? options
    : {...options, processors: await createProcessors({options})};

  return newOptions;
}

export async function createMDXLoaderItem(
  options: Options,
): Promise<RuleSetUseItem> {
  return {
    loader: require.resolve('@docusaurus/mdx-loader'),
    options: await enhancedOptions(options),
  };
}

export async function createMDXLoaderRule({
  include,
  options,
}: {
  include: RuleSetRule['include'];
  options: Options;
}): Promise<RuleSetRule> {
  return {
    test: /\.mdx?$/i,
    include,
    use: [await createMDXLoaderItem(options)],
  };
}
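Not part of the commit — a minimal sketch of how a third-party plugin could consume these helpers from an async plugin factory, mirroring the pattern the core plugins adopt below. The plugin name, the include path, and the partially-filled options object are hypothetical; a real `Options` object needs the other required fields (staticDirs, metadataPath, etc.) shown in the plugin diffs further down:

```ts
import {
  createMDXLoaderRule,
  type Options as MDXLoaderOptions,
} from '@docusaurus/mdx-loader';
import type {LoadContext, Plugin} from '@docusaurus/types';
import type {RuleSetRule} from 'webpack';

// Hypothetical plugin factory (name and include path are illustrative).
export default async function myContentPlugin(
  context: LoadContext,
): Promise<Plugin<unknown>> {
  // Partial options for illustration; real code passes the full option set.
  const loaderOptions = {
    siteDir: context.siteDir,
    markdownConfig: context.siteConfig.markdown,
  } as MDXLoaderOptions;

  // Build the webpack rule once, up front, so configureWebpack() stays sync.
  const mdxRule: RuleSetRule = await createMDXLoaderRule({
    include: ['/absolute/path/to/my-content/'], // trailing slash matters
    options: loaderOptions,
  });

  return {
    name: 'my-content-plugin',
    configureWebpack: () => ({module: {rules: [mdxRule]}}),
  };
}
```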
mdx-loader entry point (re-exports the new loader creation utils):

@@ -9,6 +9,8 @@ import {mdxLoader} from './loader';
 
 import type {TOCItem as TOCItemImported} from './remark/toc/types';
 
+export {createMDXLoaderRule, createMDXLoaderItem} from './createMDXLoader';
+
 export default mdxLoader;
 
 export type TOCItem = TOCItemImported;
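With these re-exports in place, the plugins touched by this commit import the helpers and the options type from the package root instead of reaching into the internal `@docusaurus/mdx-loader/lib/loader` path:

```ts
import {
  createMDXLoaderRule,
  createMDXLoaderItem,
  type Options as MDXLoaderOptions,
} from '@docusaurus/mdx-loader';
```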
mdx-loader loader.ts:

@@ -5,20 +5,25 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-import fs from 'fs-extra';
 import logger from '@docusaurus/logger';
 import {
   DEFAULT_PARSE_FRONT_MATTER,
-  escapePath,
   getFileLoaderUtils,
   getWebpackLoaderCompilerName,
 } from '@docusaurus/utils';
 import stringifyObject from 'stringify-object';
-import preprocessor from './preprocessor';
-import {validateMDXFrontMatter} from './frontMatter';
-import {createProcessorCached} from './processor';
+import {
+  compileToJSX,
+  createAssetsExportCode,
+  extractContentTitleData,
+  readMetadataPath,
+} from './utils';
+import type {
+  SimpleProcessors,
+  MDXOptions,
+  SimpleProcessorResult,
+} from './processor';
 import type {ResolveMarkdownLink} from './remark/resolveMarkdownLinks';
-import type {MDXOptions} from './processor';
 
 import type {MarkdownConfig} from '@docusaurus/types';
 import type {LoaderContext} from 'webpack';

@@ -43,98 +48,11 @@ export type Options = Partial<MDXOptions> & {
     metadata: {[key: string]: unknown};
   }) => {[key: string]: unknown};
   resolveMarkdownLink?: ResolveMarkdownLink;
+
+  // Will usually be created by "createMDXLoaderItem"
+  processors?: SimpleProcessors;
 };
 
-[... ~80 removed lines: the readMetadataPath(), createAssetsExportCode() and
-extractContentTitleData() helpers previously defined here; they move (with
-small adjustments such as error wrapping) to the new src/utils.ts reproduced
-in full below ...]
-
-// TODO temporary, remove this after v3.1?
-// Some plugin authors use our mdx-loader, despite it not being public API
-// see https://github.com/facebook/docusaurus/issues/8298
-function ensureMarkdownConfig(reqOptions: Options) {
-  if (!reqOptions.markdownConfig) {
-    throw new Error(
-      'Docusaurus v3+ requires MDX loader options.markdownConfig - plugin authors using the MDX loader should make sure to provide that option',
-    );
-  }
-}
-
 export async function mdxLoader(
   this: LoaderContext<Options>,
   fileContent: string,

@@ -144,59 +62,25 @@ export async function mdxLoader(
   const filePath = this.resourcePath;
   const options: Options = this.getOptions();
 
-  ensureMarkdownConfig(options);
-
   const {frontMatter} = await options.markdownConfig.parseFrontMatter({
     filePath,
     fileContent,
     defaultParseFrontMatter: DEFAULT_PARSE_FRONT_MATTER,
   });
-  const mdxFrontMatter = validateMDXFrontMatter(frontMatter.mdx);
-
-  const preprocessedContent = preprocessor({
-    fileContent,
-    filePath,
-    admonitions: options.admonitions,
-    markdownConfig: options.markdownConfig,
-  });
-
   const hasFrontMatter = Object.keys(frontMatter).length > 0;
 
-  const processor = await createProcessorCached({
-    filePath,
-    options,
-    mdxFrontMatter,
-  });
-
-  let result: {content: string; data: {[key: string]: unknown}};
+  let result: SimpleProcessorResult;
   try {
-    result = await processor.process({
-      content: preprocessedContent,
+    result = await compileToJSX({
+      fileContent,
       filePath,
       frontMatter,
+      options,
       compilerName,
     });
-  } catch (errorUnknown) {
-    const error = errorUnknown as Error;
-    [... ~17 removed lines that built a detailed "MDX compilation failed for
-    file ..." error from the MDX error attributes; this enrichment now lives
-    in compileToJSX() in the new src/utils.ts below ...]
+  } catch (error) {
+    return callback(error as Error);
   }
 
   const contentTitle = extractContentTitleData(result.data);
mdx-loader processor.ts:

@@ -31,10 +31,13 @@ import type {ProcessorOptions} from '@mdx-js/mdx';
 // See https://github.com/microsoft/TypeScript/issues/49721#issuecomment-1517839391
 type Pluggable = any; // TODO fix this asap
 
-type SimpleProcessorResult = {content: string; data: {[key: string]: unknown}};
+export type SimpleProcessorResult = {
+  content: string;
+  data: {[key: string]: unknown};
+};
 
 // TODO alt interface because impossible to import type Processor (ESM + TS :/)
-type SimpleProcessor = {
+export type SimpleProcessor = {
   process: ({
     content,
     filePath,

@@ -219,28 +222,22 @@ export async function createProcessorUncached(parameters: {
 }
 
 // We use different compilers depending on the file type (md vs mdx)
-type ProcessorsCacheEntry = {
+export type SimpleProcessors = {
   mdProcessor: SimpleProcessor;
   mdxProcessor: SimpleProcessor;
 };
 
 // Compilers are cached so that Remark/Rehype plugins can run
 // expensive code during initialization
-const ProcessorsCache = new Map<string | Options, ProcessorsCacheEntry>();
+const ProcessorsCache = new Map<string | Options, SimpleProcessors>();
 
-async function createProcessorsCacheEntry({
+export async function createProcessors({
   options,
 }: {
   options: Options;
-}): Promise<ProcessorsCacheEntry> {
+}): Promise<SimpleProcessors> {
   const {createProcessorSync} = await createProcessorFactory();
-
-  const compilers = ProcessorsCache.get(options);
-  if (compilers) {
-    return compilers;
-  }
-
-  const compilerCacheEntry: ProcessorsCacheEntry = {
+  return {
     mdProcessor: createProcessorSync({
       options,
       format: 'md',

@@ -250,13 +247,23 @@ async function createProcessorsCacheEntry({
       format: 'mdx',
     }),
   };
-
-  ProcessorsCache.set(options, compilerCacheEntry);
-
-  return compilerCacheEntry;
 }
 
-export async function createProcessorCached({
+async function createProcessorsCacheEntry({
+  options,
+}: {
+  options: Options;
+}): Promise<SimpleProcessors> {
+  const compilers = ProcessorsCache.get(options);
+  if (compilers) {
+    return compilers;
+  }
+  const processors = await createProcessors({options});
+  ProcessorsCache.set(options, processors);
+  return processors;
+}
+
+export async function getProcessor({
   filePath,
   mdxFrontMatter,
   options,

@@ -265,7 +272,8 @@ export async function createProcessorCached({
   mdxFrontMatter: MDXFrontMatter;
   options: Options;
 }): Promise<SimpleProcessor> {
-  const compilers = await createProcessorsCacheEntry({options});
+  const processors =
+    options.processors ?? (await createProcessorsCacheEntry({options}));
 
   const format = getFormat({
     filePath,

@@ -273,5 +281,5 @@ export async function createProcessorCached({
     markdownConfigFormat: options.markdownConfig.format,
   });
 
-  return format === 'md' ? compilers.mdProcessor : compilers.mdxProcessor;
+  return format === 'md' ? processors.mdProcessor : processors.mdxProcessor;
 }
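For orientation, a small sketch (not part of the diff) of how the now-exported `getProcessor()` is meant to be used inside the package — essentially what `compileToJSX()` in the new utils.ts does. The `'client'` compiler name and the empty front matter are placeholder assumptions:

```ts
// Illustrative sketch, internal to the mdx-loader package.
import {getProcessor} from './processor';
import {validateMDXFrontMatter} from './frontMatter';
import type {Options} from './loader';

async function render(filePath: string, content: string, options: Options) {
  const frontMatter: Record<string, unknown> = {}; // parsed elsewhere in real code
  const mdxFrontMatter = validateMDXFrontMatter(frontMatter.mdx);
  // Reuses options.processors when pre-created by createMDXLoaderItem(),
  // otherwise falls back to the per-options cache, then picks the md or mdx
  // processor depending on the resolved format.
  const processor = await getProcessor({filePath, mdxFrontMatter, options});
  return processor.process({
    content,
    filePath,
    frontMatter,
    compilerName: 'client', // WebpackCompilerName; assumed value for this sketch
  });
}
```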
packages/docusaurus-mdx-loader/src/utils.ts (new file, 152 lines):

/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import fs from 'fs-extra';
import logger from '@docusaurus/logger';
import {escapePath, type WebpackCompilerName} from '@docusaurus/utils';
import {getProcessor, type SimpleProcessorResult} from './processor';
import {validateMDXFrontMatter} from './frontMatter';
import preprocessor from './preprocessor';
import type {Options} from './loader';

/**
 * When this throws, it generally means that there's no metadata file associated
 * with this MDX document. It can happen when using MDX partials (usually
 * starting with _). That's why it's important to provide the `isMDXPartial`
 * function in config
 */
export async function readMetadataPath(metadataPath: string): Promise<string> {
  try {
    return await fs.readFile(metadataPath, 'utf8');
  } catch (error) {
    throw new Error(
      logger.interpolate`MDX loader can't read MDX metadata file path=${metadataPath}. Maybe the isMDXPartial option function was not provided?`,
      {cause: error as Error},
    );
  }
}

/**
 * Converts assets into an object with Webpack require calls code.
 * This is useful for MDX files to reference co-located assets using relative
 * paths. Those assets should enter the Webpack assets pipeline and be hashed.
 * For now, we only handle that for images and paths starting with `./`:
 *
 * `{image: "./myImage.png"}` => `{image: require("./myImage.png")}`
 */
export function createAssetsExportCode({
  assets,
  inlineMarkdownAssetImageFileLoader,
}: {
  assets: unknown;
  inlineMarkdownAssetImageFileLoader: string;
}): string {
  if (
    typeof assets !== 'object' ||
    !assets ||
    Object.keys(assets).length === 0
  ) {
    return 'undefined';
  }

  // TODO implementation can be completed/enhanced
  function createAssetValueCode(assetValue: unknown): string | undefined {
    if (Array.isArray(assetValue)) {
      const arrayItemCodes = assetValue.map(
        (item: unknown) => createAssetValueCode(item) ?? 'undefined',
      );
      return `[${arrayItemCodes.join(', ')}]`;
    }
    // Only process string values starting with ./
    // We could enhance this logic and check if the file exists on disk?
    if (typeof assetValue === 'string' && assetValue.startsWith('./')) {
      // TODO do we have other use-cases than image assets?
      // Probably not worth adding more support, as we want to move to Webpack 5 new asset system (https://github.com/facebook/docusaurus/pull/4708)
      return `require("${inlineMarkdownAssetImageFileLoader}${escapePath(
        assetValue,
      )}").default`;
    }
    return undefined;
  }

  const assetEntries = Object.entries(assets);

  const codeLines = assetEntries
    .map(([key, value]: [string, unknown]) => {
      const assetRequireCode = createAssetValueCode(value);
      return assetRequireCode ? `"${key}": ${assetRequireCode},` : undefined;
    })
    .filter(Boolean);

  return `{\n${codeLines.join('\n')}\n}`;
}

/**
 * data.contentTitle is set by the remark contentTitle plugin
 */
export function extractContentTitleData(data: {
  [key: string]: unknown;
}): string | undefined {
  return data.contentTitle as string | undefined;
}

export async function compileToJSX({
  filePath,
  fileContent,
  frontMatter,
  options,
  compilerName,
}: {
  filePath: string;
  fileContent: string;
  frontMatter: Record<string, unknown>;
  options: Options;
  compilerName: WebpackCompilerName;
}): Promise<SimpleProcessorResult> {
  const preprocessedFileContent = preprocessor({
    fileContent,
    filePath,
    admonitions: options.admonitions,
    markdownConfig: options.markdownConfig,
  });

  const mdxFrontMatter = validateMDXFrontMatter(frontMatter.mdx);

  const processor = await getProcessor({
    filePath,
    options,
    mdxFrontMatter,
  });

  try {
    return await processor.process({
      content: preprocessedFileContent,
      filePath,
      frontMatter,
      compilerName,
    });
  } catch (errorUnknown) {
    const error = errorUnknown as Error;

    // MDX can emit errors that have useful extra attributes
    const errorJSON = JSON.stringify(error, null, 2);
    const errorDetails =
      errorJSON === '{}'
        ? // regular JS error case: print stacktrace
          error.stack ?? 'N/A'
        : // MDX error: print extra attributes + stacktrace
          `${errorJSON}\n${error.stack}`;

    throw new Error(
      `MDX compilation failed for file ${logger.path(filePath)}\nCause: ${
        error.message
      }\nDetails:\n${errorDetails}`,
      // TODO error cause doesn't seem to be used by Webpack stats.errors :s
      {cause: error},
    );
  }
}
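To make the `createAssetsExportCode()` contract concrete, a small illustration (not part of the diff) with made-up values; the `!!url-loader!` prefix stands in for the real `inlineMarkdownAssetImageFileLoader` string:

```ts
import {createAssetsExportCode} from './utils';

const code = createAssetsExportCode({
  assets: {image: './banner.png', external: 'https://example.com/x.png'},
  inlineMarkdownAssetImageFileLoader: '!!url-loader!',
});
// Only "./" string values are turned into require() calls, so "external" is skipped:
// code === '{\n"image": require("!!url-loader!./banner.png").default,\n}'
```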
plugin-content-blog test setup:

@@ -107,6 +107,7 @@ const getPlugin = async (
     url: 'https://docusaurus.io',
     markdown,
     future: {},
+    staticDirectories: ['static'],
   } as DocusaurusConfig;
   return pluginContentBlog(
     {
plugin-content-blog plugin:

@@ -22,6 +22,10 @@ import {
   type SourceToPermalink,
 } from '@docusaurus/utils';
 import {getTagsFilePathsToWatch} from '@docusaurus/utils-validation';
+import {
+  createMDXLoaderItem,
+  type Options as MDXLoaderOptions,
+} from '@docusaurus/mdx-loader';
 import {
   getBlogTags,
   paginateBlogPosts,

@@ -47,8 +51,7 @@ import type {
   BlogContent,
   BlogPaginated,
 } from '@docusaurus/plugin-content-blog';
-import type {Options as MDXLoaderOptions} from '@docusaurus/mdx-loader/lib/loader';
-import type {RuleSetUseItem} from 'webpack';
+import type {RuleSetRule, RuleSetUseItem} from 'webpack';
 
 const PluginName = 'docusaurus-plugin-content-blog';
 
@@ -127,6 +130,97 @@ export default async function pluginContentBlog(
 
   const sourceToPermalinkHelper = createSourceToPermalinkHelper();
 
+  async function createBlogMDXLoaderRule(): Promise<RuleSetRule> {
+    const {
+      admonitions,
+      rehypePlugins,
+      remarkPlugins,
+      recmaPlugins,
+      truncateMarker,
+      beforeDefaultRemarkPlugins,
+      beforeDefaultRehypePlugins,
+    } = options;
+
+    const contentDirs = getContentPathList(contentPaths);
+
+    const loaderOptions: MDXLoaderOptions = {
+      admonitions,
+      remarkPlugins,
+      rehypePlugins,
+      recmaPlugins,
+      beforeDefaultRemarkPlugins: [
+        footnoteIDFixer,
+        ...beforeDefaultRemarkPlugins,
+      ],
+      beforeDefaultRehypePlugins,
+      staticDirs: siteConfig.staticDirectories.map((dir) =>
+        path.resolve(siteDir, dir),
+      ),
+      siteDir,
+      isMDXPartial: createAbsoluteFilePathMatcher(options.exclude, contentDirs),
+      metadataPath: (mdxPath: string) => {
+        // Note that metadataPath must be the same/in-sync as
+        // the path from createData for each MDX.
+        const aliasedPath = aliasedSitePath(mdxPath, siteDir);
+        return path.join(dataDir, `${docuHash(aliasedPath)}.json`);
+      },
+      // For blog posts a title in markdown is always removed
+      // Blog posts title are rendered separately
+      removeContentTitle: true,
+      // Assets allow to convert some relative images paths to
+      // require() calls
+      // @ts-expect-error: TODO fix typing issue
+      createAssets: ({
+        frontMatter,
+        metadata,
+      }: {
+        frontMatter: BlogPostFrontMatter;
+        metadata: BlogPostMetadata;
+      }): Assets => ({
+        image: frontMatter.image,
+        authorsImageUrls: metadata.authors.map((author) => author.imageURL),
+      }),
+      markdownConfig: siteConfig.markdown,
+      resolveMarkdownLink: ({linkPathname, sourceFilePath}) => {
+        const permalink = resolveMarkdownLinkPathname(linkPathname, {
+          sourceFilePath,
+          sourceToPermalink: sourceToPermalinkHelper.get(),
+          siteDir,
+          contentPaths,
+        });
+        if (permalink === null) {
+          logger.report(
+            onBrokenMarkdownLinks,
+          )`Blog markdown link couldn't be resolved: (url=${linkPathname}) in source file path=${sourceFilePath}`;
+        }
+        return permalink;
+      },
+    };
+
+    function createBlogMarkdownLoader(): RuleSetUseItem {
+      const markdownLoaderOptions: BlogMarkdownLoaderOptions = {
+        truncateMarker,
+      };
+      return {
+        loader: path.resolve(__dirname, './markdownLoader.js'),
+        options: markdownLoaderOptions,
+      };
+    }
+
+    return {
+      test: /\.mdx?$/i,
+      include: contentDirs
+        // Trailing slash is important, see https://github.com/facebook/docusaurus/pull/3970
+        .map(addTrailingPathSeparator),
+      use: [
+        await createMDXLoaderItem(loaderOptions),
+        createBlogMarkdownLoader(),
+      ],
+    };
+  }
+
+  const blogMDXLoaderRule = await createBlogMDXLoaderRule();
+
   return {
     name: PluginName,
 
@@ -273,91 +367,6 @@ export default async function pluginContentBlog(
     },
 
     configureWebpack() {
-      [... ~85 removed lines: the inline createMDXLoader() and
-      createBlogMarkdownLoader() helpers and their loader options, now merged
-      into the new createBlogMDXLoaderRule() above ...]
       return {
         resolve: {
           alias: {

@@ -365,15 +374,7 @@ export default async function pluginContentBlog(
         },
       },
       module: {
-        rules: [
-          {
-            test: /\.mdx?$/i,
-            include: contentDirs
-              // Trailing slash is important, see https://github.com/facebook/docusaurus/pull/3970
-              .map(addTrailingPathSeparator),
-            use: [createMDXLoader(), createBlogMarkdownLoader()],
-          },
-        ],
+        rules: [blogMDXLoaderRule],
       },
     };
   },
plugin-content-docs plugin:

@@ -26,6 +26,10 @@ import {
   getTagsFile,
   getTagsFilePathsToWatch,
 } from '@docusaurus/utils-validation';
+import {
+  createMDXLoaderRule,
+  type Options as MDXLoaderOptions,
+} from '@docusaurus/mdx-loader';
 import {loadSidebars, resolveSidebarPathOption} from './sidebars';
 import {CategoryMetadataFilenamePattern} from './sidebars/generator';
 import {

@@ -49,7 +53,6 @@ import {
 } from './translations';
 import {createAllRoutes} from './routes';
 import {createSidebarsUtils} from './sidebars/utils';
-import type {Options as MDXLoaderOptions} from '@docusaurus/mdx-loader';
 
 import type {
   PluginOptions,

@@ -61,7 +64,7 @@ import type {
 } from '@docusaurus/plugin-content-docs';
 import type {LoadContext, Plugin} from '@docusaurus/types';
 import type {DocFile, FullVersion} from './types';
-import type {RuleSetUseItem} from 'webpack';
+import type {RuleSetRule} from 'webpack';
 
 // TODO this is bad, we should have a better way to do this (new lifecycle?)
 // The source to permalink is currently a mutable map passed to the mdx loader

@@ -114,6 +117,68 @@ export default async function pluginContentDocs(
 
   const sourceToPermalinkHelper = createSourceToPermalinkHelper();
 
+  async function createDocsMDXLoaderRule(): Promise<RuleSetRule> {
+    const {
+      rehypePlugins,
+      remarkPlugins,
+      recmaPlugins,
+      beforeDefaultRehypePlugins,
+      beforeDefaultRemarkPlugins,
+    } = options;
+    const contentDirs = versionsMetadata
+      .flatMap(getContentPathList)
+      // Trailing slash is important, see https://github.com/facebook/docusaurus/pull/3970
+      .map(addTrailingPathSeparator);
+
+    const loaderOptions: MDXLoaderOptions = {
+      admonitions: options.admonitions,
+      remarkPlugins,
+      rehypePlugins,
+      recmaPlugins,
+      beforeDefaultRehypePlugins,
+      beforeDefaultRemarkPlugins,
+      staticDirs: siteConfig.staticDirectories.map((dir) =>
+        path.resolve(siteDir, dir),
+      ),
+      siteDir,
+      isMDXPartial: createAbsoluteFilePathMatcher(options.exclude, contentDirs),
+      metadataPath: (mdxPath: string) => {
+        // Note that metadataPath must be the same/in-sync as
+        // the path from createData for each MDX.
+        const aliasedPath = aliasedSitePath(mdxPath, siteDir);
+        return path.join(dataDir, `${docuHash(aliasedPath)}.json`);
+      },
+      // Assets allow to convert some relative images paths to
+      // require(...) calls
+      createAssets: ({frontMatter}: {frontMatter: DocFrontMatter}) => ({
+        image: frontMatter.image,
+      }),
+      markdownConfig: siteConfig.markdown,
+      resolveMarkdownLink: ({linkPathname, sourceFilePath}) => {
+        const version = getVersionFromSourceFilePath(
+          sourceFilePath,
+          versionsMetadata,
+        );
+        const permalink = resolveMarkdownLinkPathname(linkPathname, {
+          sourceFilePath,
+          sourceToPermalink: sourceToPermalinkHelper.get(),
+          siteDir,
+          contentPaths: version,
+        });
+        if (permalink === null) {
+          logger.report(
+            siteConfig.onBrokenMarkdownLinks,
+          )`Docs markdown link couldn't be resolved: (url=${linkPathname}) in source file path=${sourceFilePath} for version number=${version.versionName}`;
+        }
+        return permalink;
+      },
+    };
+
+    return createMDXLoaderRule({include: contentDirs, options: loaderOptions});
+  }
+
+  const docsMDXLoaderRule = await createDocsMDXLoaderRule();
+
   return {
     name: 'docusaurus-plugin-content-docs',
 
@@ -289,74 +354,7 @@ export default async function pluginContentDocs(
       });
     },
 
-    configureWebpack(_config, isServer, utils, content) {
-      [... ~67 removed lines: the inline createMDXLoader() helper and its
-      loader options, now merged into createDocsMDXLoaderRule() above; the old
-      code resolved the doc version from content.loadedVersions, while the new
-      rule uses versionsMetadata ...]
+    configureWebpack() {
       return {
         ignoreWarnings: [
           // Suppress warnings about non-existing of versions file.

@@ -370,13 +368,7 @@ export default async function pluginContentDocs(
         },
       },
       module: {
-        rules: [
-          {
-            test: /\.mdx?$/i,
-            include: contentDirs,
-            use: [createMDXLoader()],
-          },
-        ],
+        rules: [docsMDXLoaderRule],
       },
     };
   },
plugin-content-pages tests (the plugin factory is now async):

@@ -16,7 +16,7 @@ describe('docusaurus-plugin-content-pages', () => {
   it('loads simple pages', async () => {
     const siteDir = path.join(__dirname, '__fixtures__', 'website');
     const context = await loadContext({siteDir});
-    const plugin = pluginContentPages(
+    const plugin = await pluginContentPages(
       context,
       validateOptions({
         validate: normalizePluginOptions,

@@ -33,7 +33,7 @@ describe('docusaurus-plugin-content-pages', () => {
   it('loads simple pages with french translations', async () => {
     const siteDir = path.join(__dirname, '__fixtures__', 'website');
     const context = await loadContext({siteDir, locale: 'fr'});
-    const plugin = pluginContentPages(
+    const plugin = await pluginContentPages(
       context,
       validateOptions({
         validate: normalizePluginOptions,

@@ -50,7 +50,7 @@ describe('docusaurus-plugin-content-pages', () => {
   it('loads simple pages with last update', async () => {
     const siteDir = path.join(__dirname, '__fixtures__', 'website');
     const context = await loadContext({siteDir});
-    const plugin = pluginContentPages(
+    const plugin = await pluginContentPages(
       context,
       validateOptions({
         validate: normalizePluginOptions,
plugin-content-pages plugin (the factory becomes async):

@@ -14,6 +14,10 @@ import {
   createAbsoluteFilePathMatcher,
   DEFAULT_PLUGIN_ID,
 } from '@docusaurus/utils';
+import {
+  createMDXLoaderRule,
+  type Options as MDXLoaderOptions,
+} from '@docusaurus/mdx-loader';
 import {createAllRoutes} from './routes';
 import {
   createPagesContentPaths,

@@ -26,13 +30,12 @@ import type {
   LoadedContent,
   PageFrontMatter,
 } from '@docusaurus/plugin-content-pages';
-import type {RuleSetUseItem} from 'webpack';
-import type {Options as MDXLoaderOptions} from '@docusaurus/mdx-loader/lib/loader';
+import type {RuleSetRule} from 'webpack';
 
-export default function pluginContentPages(
+export default async function pluginContentPages(
   context: LoadContext,
   options: PluginOptions,
-): Plugin<LoadedContent | null> {
+): Promise<Plugin<LoadedContent | null>> {
   const {siteConfig, siteDir, generatedFilesDir} = context;
 
   const contentPaths = createPagesContentPaths({context, options});

@@ -43,6 +46,53 @@ export default function pluginContentPages(
   );
   const dataDir = path.join(pluginDataDirRoot, options.id ?? DEFAULT_PLUGIN_ID);
 
+  async function createPagesMDXLoaderRule(): Promise<RuleSetRule> {
+    const {
+      admonitions,
+      rehypePlugins,
+      remarkPlugins,
+      recmaPlugins,
+      beforeDefaultRehypePlugins,
+      beforeDefaultRemarkPlugins,
+    } = options;
+    const contentDirs = getContentPathList(contentPaths);
+
+    const loaderOptions: MDXLoaderOptions = {
+      admonitions,
+      remarkPlugins,
+      rehypePlugins,
+      recmaPlugins,
+      beforeDefaultRehypePlugins,
+      beforeDefaultRemarkPlugins,
+      staticDirs: siteConfig.staticDirectories.map((dir) =>
+        path.resolve(siteDir, dir),
+      ),
+      siteDir,
+      isMDXPartial: createAbsoluteFilePathMatcher(options.exclude, contentDirs),
+      metadataPath: (mdxPath: string) => {
+        // Note that metadataPath must be the same/in-sync as
+        // the path from createData for each MDX.
+        const aliasedSource = aliasedSitePath(mdxPath, siteDir);
+        return path.join(dataDir, `${docuHash(aliasedSource)}.json`);
+      },
+      // Assets allow to convert some relative images paths to
+      // require(...) calls
+      createAssets: ({frontMatter}: {frontMatter: PageFrontMatter}) => ({
+        image: frontMatter.image,
+      }),
+      markdownConfig: siteConfig.markdown,
+    };
+
+    return createMDXLoaderRule({
+      include: contentDirs
+        // Trailing slash is important, see https://github.com/facebook/docusaurus/pull/3970
+        .map(addTrailingPathSeparator),
+      options: loaderOptions,
+    });
+  }
+
+  const pagesMDXLoaderRule = await createPagesMDXLoaderRule();
+
   return {
     name: 'docusaurus-plugin-content-pages',
 
@@ -68,63 +118,9 @@ export default function pluginContentPages(
     },
 
     configureWebpack() {
-      [... ~46 removed lines: the inline createMDXLoader() helper and its
-      loader options, now merged into createPagesMDXLoaderRule() above ...]
       return {
         module: {
-          rules: [
-            {
-              test: /\.mdx?$/i,
-              include: contentDirs
-                // Trailing slash is important, see https://github.com/facebook/docusaurus/pull/3970
-                .map(addTrailingPathSeparator),
-              use: [createMDXLoader()],
-            },
-          ],
+          rules: [pagesMDXLoaderRule],
         },
       };
     },
test site config helper:

@@ -28,6 +28,7 @@ async function testLoad({
     baseUrl: '/',
     trailingSlash: true,
     themeConfig: {},
+    staticDirectories: [],
     presets: [],
     plugins,
     themes,
docusaurus core plugin loading:

@@ -266,7 +266,7 @@ export async function loadPlugins(
   // TODO probably not the ideal place to hardcode those plugins
   initializedPlugins.push(
     createBootstrapPlugin(context),
-    createMDXFallbackPlugin(context),
+    await createMDXFallbackPlugin(context),
   );
 
   const plugins = await executeAllPluginsContentLoading({
docusaurus core synthetic plugins (createMDXFallbackPlugin):

@@ -6,13 +6,13 @@
  */
 
 import path from 'path';
+import {createMDXLoaderItem} from '@docusaurus/mdx-loader';
 import type {RuleSetRule} from 'webpack';
 import type {
   HtmlTagObject,
   LoadContext,
   InitializedPlugin,
 } from '@docusaurus/types';
-import type {Options as MDXLoaderOptions} from '@docusaurus/mdx-loader';
 
 /**
  * Make a synthetic plugin to:

@@ -75,10 +75,23 @@ export function createBootstrapPlugin({
  * content plugins. This allows to do things such as importing repo/README.md as
  * a partial from another doc. Not ideal solution, but good enough for now
  */
-export function createMDXFallbackPlugin({
+export async function createMDXFallbackPlugin({
   siteDir,
   siteConfig,
-}: LoadContext): InitializedPlugin {
+}: LoadContext): Promise<InitializedPlugin> {
+  const mdxLoaderItem = await createMDXLoaderItem({
+    admonitions: true,
+    staticDirs: siteConfig.staticDirectories.map((dir) =>
+      path.resolve(siteDir, dir),
+    ),
+    siteDir,
+    // External MDX files are always meant to be imported as partials
+    isMDXPartial: () => true,
+    // External MDX files might have front matter, just disable the warning
+    isMDXPartialFrontMatterWarningDisabled: true,
+    markdownConfig: siteConfig.markdown,
+  });
+
   return {
     name: 'docusaurus-mdx-fallback-plugin',
     options: {

@@ -99,18 +112,6 @@ export function createMDXFallbackPlugin({
         return isMDXRule ? (rule.include as string[]) : [];
       });
     }
-      const mdxLoaderOptions: MDXLoaderOptions = {
-        admonitions: true,
-        staticDirs: siteConfig.staticDirectories.map((dir) =>
-          path.resolve(siteDir, dir),
-        ),
-        siteDir,
-        // External MDX files are always meant to be imported as partials
-        isMDXPartial: () => true,
-        // External MDX files might have front matter, just disable the warning
-        isMDXPartialFrontMatterWarningDisabled: true,
-        markdownConfig: siteConfig.markdown,
-      };
-
       return {
         module: {

@@ -118,12 +119,7 @@ export function createMDXFallbackPlugin({
           {
             test: /\.mdx?$/i,
             exclude: getMDXFallbackExcludedPaths(),
-            use: [
-              {
-                loader: require.resolve('@docusaurus/mdx-loader'),
-                options: mdxLoaderOptions,
-              },
-            ],
+            use: [mdxLoaderItem],
           },
         ],
       },