refactor(core): improve handling of server bundle (#10429)

This commit is contained in:
Sébastien Lorber 2024-08-21 17:34:18 +02:00 committed by GitHub
parent 1c56fa5830
commit 97bd815d9a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 106 additions and 54 deletions

2
.gitignore vendored
View file

@@ -38,6 +38,8 @@ website/_dogfooding/_swizzle_theme_tests
CrowdinTranslations_*.zip CrowdinTranslations_*.zip
website/.cpu-prof
website/i18n/**/* website/i18n/**/*
#!website/i18n/fr #!website/i18n/fr
#!website/i18n/fr/**/* #!website/i18n/fr/**/*

View file

@@ -28,6 +28,7 @@
"build:website:deployPreview:build": "cross-env NETLIFY=true CONTEXT='deploy-preview' yarn workspace website build", "build:website:deployPreview:build": "cross-env NETLIFY=true CONTEXT='deploy-preview' yarn workspace website build",
"build:website:deployPreview": "yarn build:website:deployPreview:testWrap && yarn build:website:deployPreview:build", "build:website:deployPreview": "yarn build:website:deployPreview:testWrap && yarn build:website:deployPreview:build",
"build:website:fast": "yarn workspace website build:fast", "build:website:fast": "yarn workspace website build:fast",
"build:website:fast:profile": "yarn workspace website build:fast:profile",
"build:website:en": "yarn workspace website build --locale en", "build:website:en": "yarn workspace website build --locale en",
"clear:website": "yarn workspace website clear", "clear:website": "yarn workspace website clear",
"serve:website": "yarn workspace website serve", "serve:website": "yarn workspace website serve",

View file

@@ -28,10 +28,6 @@ import type {LoaderContext} from 'webpack';
// See https://github.com/microsoft/TypeScript/issues/49721#issuecomment-1517839391 // See https://github.com/microsoft/TypeScript/issues/49721#issuecomment-1517839391
type Pluggable = any; // TODO fix this asap type Pluggable = any; // TODO fix this asap
const {
loaders: {inlineMarkdownAssetImageFileLoader},
} = getFileLoaderUtils();
export type MDXPlugin = Pluggable; export type MDXPlugin = Pluggable;
export type Options = Partial<MDXOptions> & { export type Options = Partial<MDXOptions> & {
@@ -72,7 +68,13 @@ async function readMetadataPath(metadataPath: string) {
* *
* `{image: "./myImage.png"}` => `{image: require("./myImage.png")}` * `{image: "./myImage.png"}` => `{image: require("./myImage.png")}`
*/ */
function createAssetsExportCode(assets: unknown) { function createAssetsExportCode({
assets,
inlineMarkdownAssetImageFileLoader,
}: {
assets: unknown;
inlineMarkdownAssetImageFileLoader: string;
}) {
if ( if (
typeof assets !== 'object' || typeof assets !== 'object' ||
!assets || !assets ||
@@ -245,13 +247,23 @@ ${JSON.stringify(frontMatter, null, 2)}`;
? options.createAssets({frontMatter, metadata}) ? options.createAssets({frontMatter, metadata})
: undefined; : undefined;
const fileLoaderUtils = getFileLoaderUtils(compilerName === 'server');
// TODO use remark plugins to insert extra exports instead of string concat? // TODO use remark plugins to insert extra exports instead of string concat?
// cf how the toc is exported // cf how the toc is exported
const exportsCode = ` const exportsCode = `
export const frontMatter = ${stringifyObject(frontMatter)}; export const frontMatter = ${stringifyObject(frontMatter)};
export const contentTitle = ${stringifyObject(contentTitle)}; export const contentTitle = ${stringifyObject(contentTitle)};
${metadataJsonString ? `export const metadata = ${metadataJsonString};` : ''} ${metadataJsonString ? `export const metadata = ${metadataJsonString};` : ''}
${assets ? `export const assets = ${createAssetsExportCode(assets)};` : ''} ${
assets
? `export const assets = ${createAssetsExportCode({
assets,
inlineMarkdownAssetImageFileLoader:
fileLoaderUtils.loaders.inlineMarkdownAssetImageFileLoader,
})};`
: ''
}
`; `;
const code = ` const code = `

View file

@@ -13,8 +13,8 @@ import {
toMessageRelativeFilePath, toMessageRelativeFilePath,
posixPath, posixPath,
escapePath, escapePath,
getFileLoaderUtils,
findAsyncSequential, findAsyncSequential,
getFileLoaderUtils,
} from '@docusaurus/utils'; } from '@docusaurus/utils';
import escapeHtml from 'escape-html'; import escapeHtml from 'escape-html';
import sizeOf from 'image-size'; import sizeOf from 'image-size';
@@ -27,10 +27,6 @@ import type {MdxJsxTextElement} from 'mdast-util-mdx';
import type {Image} from 'mdast'; import type {Image} from 'mdast';
import type {Parent} from 'unist'; import type {Parent} from 'unist';
const {
loaders: {inlineMarkdownImageFileLoader},
} = getFileLoaderUtils();
type PluginOptions = { type PluginOptions = {
staticDirs: string[]; staticDirs: string[];
siteDir: string; siteDir: string;
@@ -38,6 +34,7 @@ type PluginOptions = {
type Context = PluginOptions & { type Context = PluginOptions & {
filePath: string; filePath: string;
inlineMarkdownImageFileLoader: string;
}; };
type Target = [node: Image, index: number, parent: Parent]; type Target = [node: Image, index: number, parent: Parent];
@@ -45,21 +42,21 @@ type Target = [node: Image, index: number, parent: Parent];
async function toImageRequireNode( async function toImageRequireNode(
[node]: Target, [node]: Target,
imagePath: string, imagePath: string,
filePath: string, context: Context,
) { ) {
// MdxJsxTextElement => see https://github.com/facebook/docusaurus/pull/8288#discussion_r1125871405 // MdxJsxTextElement => see https://github.com/facebook/docusaurus/pull/8288#discussion_r1125871405
const jsxNode = node as unknown as MdxJsxTextElement; const jsxNode = node as unknown as MdxJsxTextElement;
const attributes: MdxJsxTextElement['attributes'] = []; const attributes: MdxJsxTextElement['attributes'] = [];
let relativeImagePath = posixPath( let relativeImagePath = posixPath(
path.relative(path.dirname(filePath), imagePath), path.relative(path.dirname(context.filePath), imagePath),
); );
relativeImagePath = `./${relativeImagePath}`; relativeImagePath = `./${relativeImagePath}`;
const parsedUrl = url.parse(node.url); const parsedUrl = url.parse(node.url);
const hash = parsedUrl.hash ?? ''; const hash = parsedUrl.hash ?? '';
const search = parsedUrl.search ?? ''; const search = parsedUrl.search ?? '';
const requireString = `${inlineMarkdownImageFileLoader}${ const requireString = `${context.inlineMarkdownImageFileLoader}${
escapePath(relativeImagePath) + search escapePath(relativeImagePath) + search
}`; }`;
if (node.alt) { if (node.alt) {
@@ -186,21 +183,26 @@ async function processImageNode(target: Target, context: Context) {
// We try to convert image urls without protocol to images with require calls // We try to convert image urls without protocol to images with require calls
// going through webpack ensures that image assets exist at build time // going through webpack ensures that image assets exist at build time
const imagePath = await getImageAbsolutePath(parsedUrl.pathname, context); const imagePath = await getImageAbsolutePath(parsedUrl.pathname, context);
await toImageRequireNode(target, imagePath, context.filePath); await toImageRequireNode(target, imagePath, context);
} }
export default function plugin(options: PluginOptions): Transformer { export default function plugin(options: PluginOptions): Transformer {
return async (root, vfile) => { return async (root, vfile) => {
const {visit} = await import('unist-util-visit'); const {visit} = await import('unist-util-visit');
const promises: Promise<void>[] = []; const fileLoaderUtils = getFileLoaderUtils(
visit(root, 'image', (node: Image, index, parent) => { vfile.data.compilerName === 'server',
promises.push( );
processImageNode([node, index, parent!], { const context: Context = {
...options, ...options,
filePath: vfile.path!, filePath: vfile.path!,
}), inlineMarkdownImageFileLoader:
); fileLoaderUtils.loaders.inlineMarkdownImageFileLoader,
};
const promises: Promise<void>[] = [];
visit(root, 'image', (node: Image, index, parent) => {
promises.push(processImageNode([node, index, parent!], context));
}); });
await Promise.all(promises); await Promise.all(promises);
}; };

View file

@@ -12,8 +12,8 @@ import {
toMessageRelativeFilePath, toMessageRelativeFilePath,
posixPath, posixPath,
escapePath, escapePath,
getFileLoaderUtils,
findAsyncSequential, findAsyncSequential,
getFileLoaderUtils,
} from '@docusaurus/utils'; } from '@docusaurus/utils';
import escapeHtml from 'escape-html'; import escapeHtml from 'escape-html';
import {assetRequireAttributeValue, transformNode} from '../utils'; import {assetRequireAttributeValue, transformNode} from '../utils';
@@ -24,10 +24,6 @@ import type {MdxJsxTextElement} from 'mdast-util-mdx';
import type {Parent} from 'unist'; import type {Parent} from 'unist';
import type {Link, Literal} from 'mdast'; import type {Link, Literal} from 'mdast';
const {
loaders: {inlineMarkdownLinkFileLoader},
} = getFileLoaderUtils();
type PluginOptions = { type PluginOptions = {
staticDirs: string[]; staticDirs: string[];
siteDir: string; siteDir: string;
@@ -35,6 +31,7 @@ type PluginOptions = {
type Context = PluginOptions & { type Context = PluginOptions & {
filePath: string; filePath: string;
inlineMarkdownLinkFileLoader: string;
}; };
type Target = [node: Link, index: number, parent: Parent]; type Target = [node: Link, index: number, parent: Parent];
@@ -45,7 +42,7 @@ type Target = [node: Link, index: number, parent: Parent];
async function toAssetRequireNode( async function toAssetRequireNode(
[node]: Target, [node]: Target,
assetPath: string, assetPath: string,
filePath: string, context: Context,
) { ) {
// MdxJsxTextElement => see https://github.com/facebook/docusaurus/pull/8288#discussion_r1125871405 // MdxJsxTextElement => see https://github.com/facebook/docusaurus/pull/8288#discussion_r1125871405
const jsxNode = node as unknown as MdxJsxTextElement; const jsxNode = node as unknown as MdxJsxTextElement;
@@ -53,7 +50,7 @@ async function toAssetRequireNode(
// require("assets/file.pdf") means requiring from a package called assets // require("assets/file.pdf") means requiring from a package called assets
const relativeAssetPath = `./${posixPath( const relativeAssetPath = `./${posixPath(
path.relative(path.dirname(filePath), assetPath), path.relative(path.dirname(context.filePath), assetPath),
)}`; )}`;
const parsedUrl = url.parse(node.url); const parsedUrl = url.parse(node.url);
@@ -65,7 +62,9 @@ async function toAssetRequireNode(
path.extname(relativeAssetPath) === '.json' path.extname(relativeAssetPath) === '.json'
? `${relativeAssetPath.replace('.json', '.raw')}!=` ? `${relativeAssetPath.replace('.json', '.raw')}!=`
: '' : ''
}${inlineMarkdownLinkFileLoader}${escapePath(relativeAssetPath) + search}`; }${context.inlineMarkdownLinkFileLoader}${
escapePath(relativeAssetPath) + search
}`;
attributes.push({ attributes.push({
type: 'mdxJsxAttribute', type: 'mdxJsxAttribute',
@@ -196,7 +195,7 @@ async function processLinkNode(target: Target, context: Context) {
context, context,
); );
if (assetPath) { if (assetPath) {
await toAssetRequireNode(target, assetPath, context.filePath); await toAssetRequireNode(target, assetPath, context);
} }
} }
@@ -204,14 +203,19 @@ export default function plugin(options: PluginOptions): Transformer {
return async (root, vfile) => { return async (root, vfile) => {
const {visit} = await import('unist-util-visit'); const {visit} = await import('unist-util-visit');
const promises: Promise<void>[] = []; const fileLoaderUtils = getFileLoaderUtils(
visit(root, 'link', (node: Link, index, parent) => { vfile.data.compilerName === 'server',
promises.push( );
processLinkNode([node, index, parent!], { const context: Context = {
...options, ...options,
filePath: vfile.path!, filePath: vfile.path!,
}), inlineMarkdownLinkFileLoader:
); fileLoaderUtils.loaders.inlineMarkdownLinkFileLoader,
};
const promises: Promise<void>[] = [];
visit(root, 'link', (node: Link, index, parent) => {
promises.push(processLinkNode([node, index, parent!], context));
}); });
await Promise.all(promises); await Promise.all(promises);
}; };

View file

@@ -50,12 +50,13 @@ type FileLoaderUtils = {
}; };
}; };
/** // TODO this historical code is quite messy
* Returns unified loader configurations to be used for various file types. // We should try to get rid of it and move to assets pipeline
* function createFileLoaderUtils({
* Inspired by https://github.com/gatsbyjs/gatsby/blob/8e6e021014da310b9cc7d02e58c9b3efe938c665/packages/gatsby/src/utils/webpack-utils.ts#L447 isServer,
*/ }: {
export function getFileLoaderUtils(): FileLoaderUtils { isServer: boolean;
}): FileLoaderUtils {
// Files/images < urlLoaderLimit will be inlined as base64 strings directly in // Files/images < urlLoaderLimit will be inlined as base64 strings directly in
// the html // the html
const urlLoaderLimit = WEBPACK_URL_LOADER_LIMIT; const urlLoaderLimit = WEBPACK_URL_LOADER_LIMIT;
@@ -72,6 +73,7 @@ export function getFileLoaderUtils(): FileLoaderUtils {
loader: require.resolve(`file-loader`), loader: require.resolve(`file-loader`),
options: { options: {
name: fileLoaderFileName(options.folder), name: fileLoaderFileName(options.folder),
emitFile: !isServer,
}, },
}), }),
url: (options: {folder: AssetFolder}) => ({ url: (options: {folder: AssetFolder}) => ({
@@ -80,6 +82,7 @@ export function getFileLoaderUtils(): FileLoaderUtils {
limit: urlLoaderLimit, limit: urlLoaderLimit,
name: fileLoaderFileName(options.folder), name: fileLoaderFileName(options.folder),
fallback: require.resolve('file-loader'), fallback: require.resolve('file-loader'),
emitFile: !isServer,
}, },
}), }),
@@ -92,13 +95,19 @@ export function getFileLoaderUtils(): FileLoaderUtils {
require.resolve('url-loader'), require.resolve('url-loader'),
)}?limit=${urlLoaderLimit}&name=${fileLoaderFileName( )}?limit=${urlLoaderLimit}&name=${fileLoaderFileName(
'images', 'images',
)}&fallback=${escapePath(require.resolve('file-loader'))}!`, )}&fallback=${escapePath(require.resolve('file-loader'))}${
isServer ? `&emitFile=false` : ''
}!`,
inlineMarkdownAssetImageFileLoader: `!${escapePath( inlineMarkdownAssetImageFileLoader: `!${escapePath(
require.resolve('file-loader'), require.resolve('file-loader'),
)}?name=${fileLoaderFileName('images')}!`, )}?name=${fileLoaderFileName('images')}${
isServer ? `&emitFile=false` : ''
}!`,
inlineMarkdownLinkFileLoader: `!${escapePath( inlineMarkdownLinkFileLoader: `!${escapePath(
require.resolve('file-loader'), require.resolve('file-loader'),
)}?name=${fileLoaderFileName('files')}!`, )}?name=${fileLoaderFileName('files')}${
isServer ? `&emitFile=false` : ''
}!`,
}; };
const rules: FileLoaderUtils['rules'] = { const rules: FileLoaderUtils['rules'] = {
@@ -173,3 +182,16 @@ export function getFileLoaderUtils(): FileLoaderUtils {
return {loaders, rules}; return {loaders, rules};
} }
// Loader utils are built once per compilation target and cached here, so
// repeated lookups never rebuild the same configuration objects.
const FileLoaderUtilsMap = {
  server: createFileLoaderUtils({isServer: true}),
  client: createFileLoaderUtils({isServer: false}),
};

/**
 * Returns unified loader configurations to be used for various file types.
 *
 * Inspired by https://github.com/gatsbyjs/gatsby/blob/8e6e021014da310b9cc7d02e58c9b3efe938c665/packages/gatsby/src/utils/webpack-utils.ts#L447
 */
export function getFileLoaderUtils(isServer: boolean): FileLoaderUtils {
  // Select the pre-built utils for the requested compilation target.
  const target = isServer ? ('server' as const) : ('client' as const);
  return FileLoaderUtilsMap[target];
}

View file

@@ -196,10 +196,7 @@ async function buildLocale({
}), }),
); );
// Remove server.bundle.js because it is not needed. await cleanupServerBundle(serverBundlePath);
await PerfLogger.async('Deleting server bundle', () =>
ensureUnlink(serverBundlePath),
);
// Plugin Lifecycle - postBuild. // Plugin Lifecycle - postBuild.
await PerfLogger.async('postBuild()', () => await PerfLogger.async('postBuild()', () =>
@@ -361,8 +358,17 @@ async function getBuildServerConfig({props}: {props: Props}) {
return {serverConfig: config, serverBundlePath: result.serverBundlePath}; return {serverConfig: config, serverBundlePath: result.serverBundlePath};
} }
async function ensureUnlink(filepath: string) { // Remove /build/server server.bundle.js because it is not needed.
if (await fs.pathExists(filepath)) { async function cleanupServerBundle(serverBundlePath: string) {
await fs.unlink(filepath); if (process.env.DOCUSAURUS_KEEP_SERVER_BUNDLE === 'true') {
logger.warn(
"Will NOT delete server bundle because DOCUSAURUS_KEEP_SERVER_BUNDLE is set to 'true'",
);
} else {
await PerfLogger.async('Deleting server bundle', async () => {
// For now we assume server entry is at the root of the server out dir
const serverDir = path.dirname(serverBundlePath);
await fs.rm(serverDir, {recursive: true, force: true});
});
} }
} }

View file

@@ -77,7 +77,7 @@ export async function createBaseConfig({
const isProd = process.env.NODE_ENV === 'production'; const isProd = process.env.NODE_ENV === 'production';
const minimizeEnabled = minify && isProd; const minimizeEnabled = minify && isProd;
const fileLoaderUtils = getFileLoaderUtils(); const fileLoaderUtils = getFileLoaderUtils(isServer);
const name = isServer ? 'server' : 'client'; const name = isServer ? 'server' : 'client';
const mode = isProd ? 'production' : 'development'; const mode = isProd ? 'production' : 'development';

View file

@@ -27,7 +27,8 @@ export default async function createServerConfig(params: {
}); });
const outputFilename = 'server.bundle.js'; const outputFilename = 'server.bundle.js';
const serverBundlePath = path.join(props.outDir, outputFilename); const outputDir = path.join(props.outDir, '__server');
const serverBundlePath = path.join(outputDir, outputFilename);
const config = merge(baseConfig, { const config = merge(baseConfig, {
target: `node${NODE_MAJOR_VERSION}.${NODE_MINOR_VERSION}`, target: `node${NODE_MAJOR_VERSION}.${NODE_MINOR_VERSION}`,
@@ -35,6 +36,7 @@ export default async function createServerConfig(params: {
main: path.resolve(__dirname, '../client/serverEntry.js'), main: path.resolve(__dirname, '../client/serverEntry.js'),
}, },
output: { output: {
path: outputDir,
filename: outputFilename, filename: outputFilename,
libraryTarget: 'commonjs2', libraryTarget: 'commonjs2',
// Workaround for Webpack 4 Bug (https://github.com/webpack/webpack/issues/6522) // Workaround for Webpack 4 Bug (https://github.com/webpack/webpack/issues/6522)

View file

@@ -22,6 +22,7 @@
"start:blogOnly": "cross-env yarn start --config=docusaurus.config-blog-only.js", "start:blogOnly": "cross-env yarn start --config=docusaurus.config-blog-only.js",
"build:blogOnly": "cross-env yarn build --config=docusaurus.config-blog-only.js", "build:blogOnly": "cross-env yarn build --config=docusaurus.config-blog-only.js",
"build:fast": "cross-env BUILD_FAST=true yarn build --locale en", "build:fast": "cross-env BUILD_FAST=true yarn build --locale en",
"build:fast:profile": "cross-env BUILD_FAST=true node --cpu-prof --cpu-prof-dir .cpu-prof ./node_modules/.bin/docusaurus build --locale en",
"netlify:build:production": "yarn docusaurus write-translations && yarn netlify:crowdin:delay && yarn netlify:crowdin:uploadSources && yarn netlify:crowdin:downloadTranslations && yarn build && yarn test:css-order", "netlify:build:production": "yarn docusaurus write-translations && yarn netlify:crowdin:delay && yarn netlify:crowdin:uploadSources && yarn netlify:crowdin:downloadTranslations && yarn build && yarn test:css-order",
"netlify:build:branchDeploy": "yarn build && yarn test:css-order", "netlify:build:branchDeploy": "yarn build && yarn test:css-order",
"netlify:build:deployPreview": "yarn build && yarn test:css-order", "netlify:build:deployPreview": "yarn build && yarn test:css-order",