Mirror of https://github.com/facebook/docusaurus.git (synced 2025-07-23 19:48:54 +02:00)

refactor(core): improve handling of server bundle (#10429)

parent 1c56fa5830
commit 97bd815d9a

10 changed files with 106 additions and 54 deletions
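
In short: the module-scope `getFileLoaderUtils()` singletons are removed. The MDX loader and the remark transform plugins now call `getFileLoaderUtils(isServer)` once per compilation, so the client and the server bundle each resolve their own inline file-loader strings, and those strings are threaded through an explicit `Context` object instead of being read from module scope. A minimal TypeScript sketch of the call pattern, assuming only what the hunks below show (the helper name `createRemarkContext` and its parameters are illustrative):

import {getFileLoaderUtils} from '@docusaurus/utils';

// Sketch only: one set of file-loader utils per compilation (client vs. server),
// replacing the former module-level `getFileLoaderUtils()` call.
function createRemarkContext(compilerName: string, filePath: string) {
  const fileLoaderUtils = getFileLoaderUtils(compilerName === 'server');
  return {
    filePath,
    inlineMarkdownImageFileLoader:
      fileLoaderUtils.loaders.inlineMarkdownImageFileLoader,
    inlineMarkdownLinkFileLoader:
      fileLoaderUtils.loaders.inlineMarkdownLinkFileLoader,
  };
}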
@@ -28,10 +28,6 @@ import type {LoaderContext} from 'webpack';
 // See https://github.com/microsoft/TypeScript/issues/49721#issuecomment-1517839391
 type Pluggable = any; // TODO fix this asap
 
-const {
-  loaders: {inlineMarkdownAssetImageFileLoader},
-} = getFileLoaderUtils();
-
 export type MDXPlugin = Pluggable;
 
 export type Options = Partial<MDXOptions> & {
@@ -72,7 +68,13 @@ async function readMetadataPath(metadataPath: string) {
  *
  * `{image: "./myImage.png"}` => `{image: require("./myImage.png")}`
  */
-function createAssetsExportCode(assets: unknown) {
+function createAssetsExportCode({
+  assets,
+  inlineMarkdownAssetImageFileLoader,
+}: {
+  assets: unknown;
+  inlineMarkdownAssetImageFileLoader: string;
+}) {
   if (
     typeof assets !== 'object' ||
     !assets ||
@@ -245,13 +247,23 @@ ${JSON.stringify(frontMatter, null, 2)}`;
     ? options.createAssets({frontMatter, metadata})
     : undefined;
 
+  const fileLoaderUtils = getFileLoaderUtils(compilerName === 'server');
+
   // TODO use remark plugins to insert extra exports instead of string concat?
   // cf how the toc is exported
   const exportsCode = `
 export const frontMatter = ${stringifyObject(frontMatter)};
 export const contentTitle = ${stringifyObject(contentTitle)};
 ${metadataJsonString ? `export const metadata = ${metadataJsonString};` : ''}
-${assets ? `export const assets = ${createAssetsExportCode(assets)};` : ''}
+${
+  assets
+    ? `export const assets = ${createAssetsExportCode({
+        assets,
+        inlineMarkdownAssetImageFileLoader:
+          fileLoaderUtils.loaders.inlineMarkdownAssetImageFileLoader,
+      })};`
+    : ''
+}
 `;
 
   const code = `
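
For context, `createAssetsExportCode` serializes the `assets` object returned by `options.createAssets` into source code, turning asset paths into `require()` calls prefixed with the inline image file-loader so that webpack resolves the assets at build time (see the `{image: "./myImage.png"} => {image: require("./myImage.png")}` comment above). A simplified sketch of that idea, assuming flat string-valued assets; the real helper applies stricter checks on which values it converts:

// Simplified sketch, not the actual implementation.
function createAssetsExportCodeSketch({
  assets,
  inlineMarkdownAssetImageFileLoader,
}: {
  assets: Record<string, string>;
  inlineMarkdownAssetImageFileLoader: string;
}): string {
  const entries = Object.entries(assets).map(
    ([key, value]) =>
      `  ${JSON.stringify(key)}: require(${JSON.stringify(
        `${inlineMarkdownAssetImageFileLoader}${value}`,
      )}),`,
  );
  // Produces a code string like: {"image": require("<loader prefix>./myImage.png")}
  return `{\n${entries.join('\n')}\n}`;
}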
@@ -13,8 +13,8 @@ import {
   toMessageRelativeFilePath,
   posixPath,
   escapePath,
-  getFileLoaderUtils,
   findAsyncSequential,
+  getFileLoaderUtils,
 } from '@docusaurus/utils';
 import escapeHtml from 'escape-html';
 import sizeOf from 'image-size';
@@ -27,10 +27,6 @@ import type {MdxJsxTextElement} from 'mdast-util-mdx';
 import type {Image} from 'mdast';
 import type {Parent} from 'unist';
 
-const {
-  loaders: {inlineMarkdownImageFileLoader},
-} = getFileLoaderUtils();
-
 type PluginOptions = {
   staticDirs: string[];
   siteDir: string;
@@ -38,6 +34,7 @@ type PluginOptions = {
 
 type Context = PluginOptions & {
   filePath: string;
+  inlineMarkdownImageFileLoader: string;
 };
 
 type Target = [node: Image, index: number, parent: Parent];
@@ -45,21 +42,21 @@ type Target = [node: Image, index: number, parent: Parent];
 async function toImageRequireNode(
   [node]: Target,
   imagePath: string,
-  filePath: string,
+  context: Context,
 ) {
   // MdxJsxTextElement => see https://github.com/facebook/docusaurus/pull/8288#discussion_r1125871405
   const jsxNode = node as unknown as MdxJsxTextElement;
   const attributes: MdxJsxTextElement['attributes'] = [];
 
   let relativeImagePath = posixPath(
-    path.relative(path.dirname(filePath), imagePath),
+    path.relative(path.dirname(context.filePath), imagePath),
   );
   relativeImagePath = `./${relativeImagePath}`;
 
   const parsedUrl = url.parse(node.url);
   const hash = parsedUrl.hash ?? '';
   const search = parsedUrl.search ?? '';
-  const requireString = `${inlineMarkdownImageFileLoader}${
+  const requireString = `${context.inlineMarkdownImageFileLoader}${
     escapePath(relativeImagePath) + search
   }`;
   if (node.alt) {
@@ -186,21 +183,26 @@ async function processImageNode(target: Target, context: Context) {
   // We try to convert image urls without protocol to images with require calls
   // going through webpack ensures that image assets exist at build time
   const imagePath = await getImageAbsolutePath(parsedUrl.pathname, context);
-  await toImageRequireNode(target, imagePath, context.filePath);
+  await toImageRequireNode(target, imagePath, context);
 }
 
 export default function plugin(options: PluginOptions): Transformer {
   return async (root, vfile) => {
     const {visit} = await import('unist-util-visit');
 
+    const fileLoaderUtils = getFileLoaderUtils(
+      vfile.data.compilerName === 'server',
+    );
+    const context: Context = {
+      ...options,
+      filePath: vfile.path!,
+      inlineMarkdownImageFileLoader:
+        fileLoaderUtils.loaders.inlineMarkdownImageFileLoader,
+    };
+
     const promises: Promise<void>[] = [];
     visit(root, 'image', (node: Image, index, parent) => {
-      promises.push(
-        processImageNode([node, index, parent!], {
-          ...options,
-          filePath: vfile.path!,
-        }),
-      );
+      promises.push(processImageNode([node, index, parent!], context));
     });
     await Promise.all(promises);
   };
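
Both remark transforms follow the same shape: the compiler name travels on `vfile.data` (presumably attached by the MDX loader, which checks `compilerName === 'server'` in the first file's hunks), and the per-file `Context`, including the resolved inline loader string, is built once in the plugin body rather than re-assembled for every visited node. A minimal sketch of reading such per-file data in a unified transformer; everything except the `compilerName` field is illustrative:

import type {Transformer} from 'unified';

// Minimal sketch: read data attached to the vfile by an upstream loader and
// build the per-file context once, before visiting any nodes.
export default function examplePlugin(): Transformer {
  return async (root, vfile) => {
    // Assumption: `compilerName` is set on vfile.data by the MDX loader.
    const {compilerName} = vfile.data as {compilerName?: string};
    const context = {filePath: vfile.path!, isServer: compilerName === 'server'};
    // ...visit nodes here, passing `context` to each node processor.
    void root;
    void context;
  };
}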
@@ -12,8 +12,8 @@ import {
   toMessageRelativeFilePath,
   posixPath,
   escapePath,
-  getFileLoaderUtils,
   findAsyncSequential,
+  getFileLoaderUtils,
 } from '@docusaurus/utils';
 import escapeHtml from 'escape-html';
 import {assetRequireAttributeValue, transformNode} from '../utils';
@@ -24,10 +24,6 @@ import type {MdxJsxTextElement} from 'mdast-util-mdx';
 import type {Parent} from 'unist';
 import type {Link, Literal} from 'mdast';
 
-const {
-  loaders: {inlineMarkdownLinkFileLoader},
-} = getFileLoaderUtils();
-
 type PluginOptions = {
   staticDirs: string[];
   siteDir: string;
@@ -35,6 +31,7 @@ type PluginOptions = {
 
 type Context = PluginOptions & {
   filePath: string;
+  inlineMarkdownLinkFileLoader: string;
 };
 
 type Target = [node: Link, index: number, parent: Parent];
@@ -45,7 +42,7 @@ type Target = [node: Link, index: number, parent: Parent];
 async function toAssetRequireNode(
   [node]: Target,
   assetPath: string,
-  filePath: string,
+  context: Context,
 ) {
   // MdxJsxTextElement => see https://github.com/facebook/docusaurus/pull/8288#discussion_r1125871405
   const jsxNode = node as unknown as MdxJsxTextElement;
@@ -53,7 +50,7 @@ async function toAssetRequireNode(
 
   // require("assets/file.pdf") means requiring from a package called assets
   const relativeAssetPath = `./${posixPath(
-    path.relative(path.dirname(filePath), assetPath),
+    path.relative(path.dirname(context.filePath), assetPath),
   )}`;
 
   const parsedUrl = url.parse(node.url);
@@ -65,7 +62,9 @@ async function toAssetRequireNode(
     path.extname(relativeAssetPath) === '.json'
       ? `${relativeAssetPath.replace('.json', '.raw')}!=`
       : ''
-  }${inlineMarkdownLinkFileLoader}${escapePath(relativeAssetPath) + search}`;
+  }${context.inlineMarkdownLinkFileLoader}${
+    escapePath(relativeAssetPath) + search
+  }`;
 
   attributes.push({
     type: 'mdxJsxAttribute',
@@ -196,7 +195,7 @@ async function processLinkNode(target: Target, context: Context) {
     context,
   );
   if (assetPath) {
-    await toAssetRequireNode(target, assetPath, context.filePath);
+    await toAssetRequireNode(target, assetPath, context);
   }
 }
 
@@ -204,14 +203,19 @@ export default function plugin(options: PluginOptions): Transformer {
   return async (root, vfile) => {
     const {visit} = await import('unist-util-visit');
 
+    const fileLoaderUtils = getFileLoaderUtils(
+      vfile.data.compilerName === 'server',
+    );
+    const context: Context = {
+      ...options,
+      filePath: vfile.path!,
+      inlineMarkdownLinkFileLoader:
+        fileLoaderUtils.loaders.inlineMarkdownLinkFileLoader,
+    };
+
     const promises: Promise<void>[] = [];
     visit(root, 'link', (node: Link, index, parent) => {
-      promises.push(
-        processLinkNode([node, index, parent!], {
-          ...options,
-          filePath: vfile.path!,
-        }),
-      );
+      promises.push(processLinkNode([node, index, parent!], context));
     });
     await Promise.all(promises);
   };