refactor(core): improve handling of server bundle (#10429)
Parent commit: 1c56fa5830
This commit:   97bd815d9a
10 changed files with 106 additions and 54 deletions

.gitignore (vendored): 2 changes
@@ -38,6 +38,8 @@ website/_dogfooding/_swizzle_theme_tests
 CrowdinTranslations_*.zip
 
+website/.cpu-prof
+
 website/i18n/**/*
 #!website/i18n/fr
 #!website/i18n/fr/**/*
@@ -28,6 +28,7 @@
     "build:website:deployPreview:build": "cross-env NETLIFY=true CONTEXT='deploy-preview' yarn workspace website build",
     "build:website:deployPreview": "yarn build:website:deployPreview:testWrap && yarn build:website:deployPreview:build",
     "build:website:fast": "yarn workspace website build:fast",
+    "build:website:fast:profile": "yarn workspace website build:fast:profile",
     "build:website:en": "yarn workspace website build --locale en",
     "clear:website": "yarn workspace website clear",
     "serve:website": "yarn workspace website serve",
@@ -28,10 +28,6 @@ import type {LoaderContext} from 'webpack';
 // See https://github.com/microsoft/TypeScript/issues/49721#issuecomment-1517839391
 type Pluggable = any; // TODO fix this asap
 
-const {
-  loaders: {inlineMarkdownAssetImageFileLoader},
-} = getFileLoaderUtils();
-
 export type MDXPlugin = Pluggable;
 
 export type Options = Partial<MDXOptions> & {
@@ -72,7 +68,13 @@ async function readMetadataPath(metadataPath: string) {
  *
  * `{image: "./myImage.png"}` => `{image: require("./myImage.png")}`
  */
-function createAssetsExportCode(assets: unknown) {
+function createAssetsExportCode({
+  assets,
+  inlineMarkdownAssetImageFileLoader,
+}: {
+  assets: unknown;
+  inlineMarkdownAssetImageFileLoader: string;
+}) {
   if (
     typeof assets !== 'object' ||
     !assets ||
@@ -245,13 +247,23 @@ ${JSON.stringify(frontMatter, null, 2)}`;
     ? options.createAssets({frontMatter, metadata})
     : undefined;
 
+  const fileLoaderUtils = getFileLoaderUtils(compilerName === 'server');
+
   // TODO use remark plugins to insert extra exports instead of string concat?
   // cf how the toc is exported
   const exportsCode = `
 export const frontMatter = ${stringifyObject(frontMatter)};
 export const contentTitle = ${stringifyObject(contentTitle)};
 ${metadataJsonString ? `export const metadata = ${metadataJsonString};` : ''}
-${assets ? `export const assets = ${createAssetsExportCode(assets)};` : ''}
+${
+  assets
+    ? `export const assets = ${createAssetsExportCode({
+        assets,
+        inlineMarkdownAssetImageFileLoader:
+          fileLoaderUtils.loaders.inlineMarkdownAssetImageFileLoader,
+      })};`
+    : ''
+}
 `;
 
   const code = `
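Note (illustration, not part of the diff): the hunk above makes the MDX loader pick the inline file loader per compiler instead of at module load time. A rough sketch of what the generated `assets` export ends up looking like; the loader path and query string below are made-up placeholders, and the real value comes from `getFileLoaderUtils()`.

// Illustrative only: on the server compiler the loader query now carries &emitFile=false.
const inlineMarkdownAssetImageFileLoader =
  '!/path/to/file-loader.js?name=assets/images/[name]-[hash].[ext]&emitFile=false!';

// Front matter input:        assets = {image: './myImage.png'}
// Generated export (string): {image: require("<loader>./myImage.png")}
const assetsExportCode = `{
  image: require("${inlineMarkdownAssetImageFileLoader}./myImage.png"),
}`;
console.log(assetsExportCode);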
@@ -13,8 +13,8 @@ import {
   toMessageRelativeFilePath,
   posixPath,
   escapePath,
-  getFileLoaderUtils,
   findAsyncSequential,
+  getFileLoaderUtils,
 } from '@docusaurus/utils';
 import escapeHtml from 'escape-html';
 import sizeOf from 'image-size';
@@ -27,10 +27,6 @@ import type {MdxJsxTextElement} from 'mdast-util-mdx';
 import type {Image} from 'mdast';
 import type {Parent} from 'unist';
 
-const {
-  loaders: {inlineMarkdownImageFileLoader},
-} = getFileLoaderUtils();
-
 type PluginOptions = {
   staticDirs: string[];
   siteDir: string;
@@ -38,6 +34,7 @@ type PluginOptions = {
 
 type Context = PluginOptions & {
   filePath: string;
+  inlineMarkdownImageFileLoader: string;
 };
 
 type Target = [node: Image, index: number, parent: Parent];
@@ -45,21 +42,21 @@ type Target = [node: Image, index: number, parent: Parent];
 async function toImageRequireNode(
   [node]: Target,
   imagePath: string,
-  filePath: string,
+  context: Context,
 ) {
   // MdxJsxTextElement => see https://github.com/facebook/docusaurus/pull/8288#discussion_r1125871405
   const jsxNode = node as unknown as MdxJsxTextElement;
   const attributes: MdxJsxTextElement['attributes'] = [];
 
   let relativeImagePath = posixPath(
-    path.relative(path.dirname(filePath), imagePath),
+    path.relative(path.dirname(context.filePath), imagePath),
   );
   relativeImagePath = `./${relativeImagePath}`;
 
   const parsedUrl = url.parse(node.url);
   const hash = parsedUrl.hash ?? '';
   const search = parsedUrl.search ?? '';
-  const requireString = `${inlineMarkdownImageFileLoader}${
+  const requireString = `${context.inlineMarkdownImageFileLoader}${
     escapePath(relativeImagePath) + search
   }`;
   if (node.alt) {
@@ -186,21 +183,26 @@ async function processImageNode(target: Target, context: Context) {
   // We try to convert image urls without protocol to images with require calls
   // going through webpack ensures that image assets exist at build time
   const imagePath = await getImageAbsolutePath(parsedUrl.pathname, context);
-  await toImageRequireNode(target, imagePath, context.filePath);
+  await toImageRequireNode(target, imagePath, context);
 }
 
 export default function plugin(options: PluginOptions): Transformer {
   return async (root, vfile) => {
     const {visit} = await import('unist-util-visit');
 
-    const promises: Promise<void>[] = [];
-    visit(root, 'image', (node: Image, index, parent) => {
-      promises.push(
-        processImageNode([node, index, parent!], {
-          ...options,
-          filePath: vfile.path!,
-        }),
-      );
-    });
+    const fileLoaderUtils = getFileLoaderUtils(
+      vfile.data.compilerName === 'server',
+    );
+    const context: Context = {
+      ...options,
+      filePath: vfile.path!,
+      inlineMarkdownImageFileLoader:
+        fileLoaderUtils.loaders.inlineMarkdownImageFileLoader,
+    };
+
+    const promises: Promise<void>[] = [];
+    visit(root, 'image', (node: Image, index, parent) => {
+      promises.push(processImageNode([node, index, parent!], context));
+    });
     await Promise.all(promises);
   };
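Note (sketch only, not part of the diff): the plugin now branches on `vfile.data.compilerName`, so the MDX loader is assumed to seed that field on the vfile before the remark pipeline runs; the field name comes from the code above, everything else is an assumption for illustration.

import {VFile} from 'vfile';

// Hypothetical seeding of the compiler name ('client' | 'server') on the vfile.
const file = new VFile({path: '/docs/intro.mdx', value: '# Hello'});
(file.data as {compilerName?: string}).compilerName = 'server';

// Inside the plugin: getFileLoaderUtils(file.data.compilerName === 'server')
// then selects the server-side loaders (emitFile disabled).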
@@ -12,8 +12,8 @@ import {
   toMessageRelativeFilePath,
   posixPath,
   escapePath,
-  getFileLoaderUtils,
   findAsyncSequential,
+  getFileLoaderUtils,
 } from '@docusaurus/utils';
 import escapeHtml from 'escape-html';
 import {assetRequireAttributeValue, transformNode} from '../utils';
@@ -24,10 +24,6 @@ import type {MdxJsxTextElement} from 'mdast-util-mdx';
 import type {Parent} from 'unist';
 import type {Link, Literal} from 'mdast';
 
-const {
-  loaders: {inlineMarkdownLinkFileLoader},
-} = getFileLoaderUtils();
-
 type PluginOptions = {
   staticDirs: string[];
   siteDir: string;
@@ -35,6 +31,7 @@ type PluginOptions = {
 
 type Context = PluginOptions & {
   filePath: string;
+  inlineMarkdownLinkFileLoader: string;
 };
 
 type Target = [node: Link, index: number, parent: Parent];
@@ -45,7 +42,7 @@ type Target = [node: Link, index: number, parent: Parent];
 async function toAssetRequireNode(
   [node]: Target,
   assetPath: string,
-  filePath: string,
+  context: Context,
 ) {
   // MdxJsxTextElement => see https://github.com/facebook/docusaurus/pull/8288#discussion_r1125871405
   const jsxNode = node as unknown as MdxJsxTextElement;
@@ -53,7 +50,7 @@ async function toAssetRequireNode(
 
   // require("assets/file.pdf") means requiring from a package called assets
   const relativeAssetPath = `./${posixPath(
-    path.relative(path.dirname(filePath), assetPath),
+    path.relative(path.dirname(context.filePath), assetPath),
   )}`;
 
   const parsedUrl = url.parse(node.url);
@@ -65,7 +62,9 @@
     path.extname(relativeAssetPath) === '.json'
       ? `${relativeAssetPath.replace('.json', '.raw')}!=`
       : ''
-  }${inlineMarkdownLinkFileLoader}${escapePath(relativeAssetPath) + search}`;
+  }${context.inlineMarkdownLinkFileLoader}${
+    escapePath(relativeAssetPath) + search
+  }`;
 
   attributes.push({
     type: 'mdxJsxAttribute',
@@ -196,7 +195,7 @@ async function processLinkNode(target: Target, context: Context) {
     context,
   );
   if (assetPath) {
-    await toAssetRequireNode(target, assetPath, context.filePath);
+    await toAssetRequireNode(target, assetPath, context);
   }
 }
 
@@ -204,14 +203,19 @@ export default function plugin(options: PluginOptions): Transformer {
   return async (root, vfile) => {
     const {visit} = await import('unist-util-visit');
 
-    const promises: Promise<void>[] = [];
-    visit(root, 'link', (node: Link, index, parent) => {
-      promises.push(
-        processLinkNode([node, index, parent!], {
-          ...options,
-          filePath: vfile.path!,
-        }),
-      );
-    });
+    const fileLoaderUtils = getFileLoaderUtils(
+      vfile.data.compilerName === 'server',
+    );
+    const context: Context = {
+      ...options,
+      filePath: vfile.path!,
+      inlineMarkdownLinkFileLoader:
+        fileLoaderUtils.loaders.inlineMarkdownLinkFileLoader,
+    };
+
+    const promises: Promise<void>[] = [];
+    visit(root, 'link', (node: Link, index, parent) => {
+      promises.push(processLinkNode([node, index, parent!], context));
+    });
     await Promise.all(promises);
   };
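Note (illustration, not part of the diff): with the context-provided loader string, a Markdown link such as `[PDF](./file.pdf)` is rewritten into a `require()` through file-loader, and on the server compiler the query now carries `emitFile=false`, so the SSG pass no longer writes a second copy of the asset. The loader path and name pattern below are placeholders.

// Placeholder values for illustration only.
const inlineMarkdownLinkFileLoader =
  '!/path/to/file-loader.js?name=assets/files/[name]-[hash].[ext]&emitFile=false!';
const relativeAssetPath = './file.pdf';
const search = '';

const requireString = `${inlineMarkdownLinkFileLoader}${relativeAssetPath + search}`;
// => "!/path/to/file-loader.js?...&emitFile=false!./file.pdf"
console.log(requireString);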
@@ -50,12 +50,13 @@ type FileLoaderUtils = {
   };
 };
 
-/**
- * Returns unified loader configurations to be used for various file types.
- *
- * Inspired by https://github.com/gatsbyjs/gatsby/blob/8e6e021014da310b9cc7d02e58c9b3efe938c665/packages/gatsby/src/utils/webpack-utils.ts#L447
- */
-export function getFileLoaderUtils(): FileLoaderUtils {
+// TODO this historical code is quite messy
+// We should try to get rid of it and move to assets pipeline
+function createFileLoaderUtils({
+  isServer,
+}: {
+  isServer: boolean;
+}): FileLoaderUtils {
   // Files/images < urlLoaderLimit will be inlined as base64 strings directly in
   // the html
   const urlLoaderLimit = WEBPACK_URL_LOADER_LIMIT;
@@ -72,6 +73,7 @@ export function getFileLoaderUtils(): FileLoaderUtils {
       loader: require.resolve(`file-loader`),
       options: {
         name: fileLoaderFileName(options.folder),
+        emitFile: !isServer,
       },
     }),
     url: (options: {folder: AssetFolder}) => ({
@@ -80,6 +82,7 @@
         limit: urlLoaderLimit,
         name: fileLoaderFileName(options.folder),
         fallback: require.resolve('file-loader'),
+        emitFile: !isServer,
       },
     }),
 
@@ -92,13 +95,19 @@ export function getFileLoaderUtils(): FileLoaderUtils {
       require.resolve('url-loader'),
     )}?limit=${urlLoaderLimit}&name=${fileLoaderFileName(
       'images',
-    )}&fallback=${escapePath(require.resolve('file-loader'))}!`,
+    )}&fallback=${escapePath(require.resolve('file-loader'))}${
+      isServer ? `&emitFile=false` : ''
+    }!`,
     inlineMarkdownAssetImageFileLoader: `!${escapePath(
       require.resolve('file-loader'),
-    )}?name=${fileLoaderFileName('images')}!`,
+    )}?name=${fileLoaderFileName('images')}${
+      isServer ? `&emitFile=false` : ''
+    }!`,
     inlineMarkdownLinkFileLoader: `!${escapePath(
       require.resolve('file-loader'),
-    )}?name=${fileLoaderFileName('files')}!`,
+    )}?name=${fileLoaderFileName('files')}${
+      isServer ? `&emitFile=false` : ''
+    }!`,
   };
 
   const rules: FileLoaderUtils['rules'] = {
@@ -173,3 +182,16 @@ export function getFileLoaderUtils(): FileLoaderUtils {
 
   return {loaders, rules};
 }
+
+const FileLoaderUtilsMap = {
+  server: createFileLoaderUtils({isServer: true}),
+  client: createFileLoaderUtils({isServer: false}),
+};
+
+/**
+ * Returns unified loader configurations to be used for various file types.
+ * Inspired by https://github.com/gatsbyjs/gatsby/blob/8e6e021014da310b9cc7d02e58c9b3efe938c665/packages/gatsby/src/utils/webpack-utils.ts#L447
+ */
+export function getFileLoaderUtils(isServer: boolean): FileLoaderUtils {
+  return isServer ? FileLoaderUtilsMap.server : FileLoaderUtilsMap.client;
+}
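Note (usage sketch, not part of the diff): call sites now pass `isServer`, and the two variants are created once in `FileLoaderUtilsMap`, so repeated calls return the same memoized object. A minimal sketch of the expected behavior:

import {getFileLoaderUtils} from '@docusaurus/utils';

// Server build: loaders/rules carry emitFile: false, so the SSR compilation
// does not write asset files a second time.
const serverUtils = getFileLoaderUtils(true);

// Client build: assets are emitted as usual.
const clientUtils = getFileLoaderUtils(false);

console.log(getFileLoaderUtils(true) === serverUtils); // true (memoized)
console.log(serverUtils === clientUtils); // false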
@@ -196,10 +196,7 @@ async function buildLocale({
     }),
   );
 
-  // Remove server.bundle.js because it is not needed.
-  await PerfLogger.async('Deleting server bundle', () =>
-    ensureUnlink(serverBundlePath),
-  );
+  await cleanupServerBundle(serverBundlePath);
 
   // Plugin Lifecycle - postBuild.
   await PerfLogger.async('postBuild()', () =>
@@ -361,8 +358,17 @@ async function getBuildServerConfig({props}: {props: Props}) {
   return {serverConfig: config, serverBundlePath: result.serverBundlePath};
 }
 
-async function ensureUnlink(filepath: string) {
-  if (await fs.pathExists(filepath)) {
-    await fs.unlink(filepath);
+// Remove /build/server server.bundle.js because it is not needed.
+async function cleanupServerBundle(serverBundlePath: string) {
+  if (process.env.DOCUSAURUS_KEEP_SERVER_BUNDLE === 'true') {
+    logger.warn(
+      "Will NOT delete server bundle because DOCUSAURUS_KEEP_SERVER_BUNDLE is set to 'true'",
+    );
+  } else {
+    await PerfLogger.async('Deleting server bundle', async () => {
+      // For now we assume server entry is at the root of the server out dir
+      const serverDir = path.dirname(serverBundlePath);
+      await fs.rm(serverDir, {recursive: true, force: true});
+    });
   }
 }
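Note (not part of the diff): per the code above, the server bundle directory is deleted after static generation unless `DOCUSAURUS_KEEP_SERVER_BUNDLE=true` is set at build time. When it is kept, it should live under the build output at the path produced by the webpack server config further down; the helper below is a hypothetical convenience for locating it.

import path from 'path';

// Hypothetical debugging helper, assuming the build ran with
// DOCUSAURUS_KEEP_SERVER_BUNDLE=true; '__server' and 'server.bundle.js'
// match the webpack server config in this commit.
function getKeptServerBundlePath(outDir: string): string {
  return path.join(outDir, '__server', 'server.bundle.js');
}

console.log(getKeptServerBundlePath('/my-site/build'));
// => /my-site/build/__server/server.bundle.js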
@@ -77,7 +77,7 @@ export async function createBaseConfig({
   const isProd = process.env.NODE_ENV === 'production';
   const minimizeEnabled = minify && isProd;
 
-  const fileLoaderUtils = getFileLoaderUtils();
+  const fileLoaderUtils = getFileLoaderUtils(isServer);
 
   const name = isServer ? 'server' : 'client';
   const mode = isProd ? 'production' : 'development';
@@ -27,7 +27,8 @@ export default async function createServerConfig(params: {
   });
 
   const outputFilename = 'server.bundle.js';
-  const serverBundlePath = path.join(props.outDir, outputFilename);
+  const outputDir = path.join(props.outDir, '__server');
+  const serverBundlePath = path.join(outputDir, outputFilename);
 
   const config = merge(baseConfig, {
     target: `node${NODE_MAJOR_VERSION}.${NODE_MINOR_VERSION}`,
@@ -35,6 +36,7 @@
       main: path.resolve(__dirname, '../client/serverEntry.js'),
     },
     output: {
+      path: outputDir,
       filename: outputFilename,
       libraryTarget: 'commonjs2',
       // Workaround for Webpack 4 Bug (https://github.com/webpack/webpack/issues/6522)
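Note (sketch only, not part of the diff): emitting the server bundle into a dedicated `__server` subdirectory is what lets `cleanupServerBundle()` above remove everything the SSR compilation produced with a single recursive rm on `path.dirname(serverBundlePath)`. The example path is made up.

import path from 'path';

const outDir = '/my-site/build'; // example value
const outputDir = path.join(outDir, '__server');
const serverBundlePath = path.join(outputDir, 'server.bundle.js');

// The directory removed by cleanupServerBundle() is exactly the SSR output
// dir, so the static client files directly under outDir are left untouched.
console.log(path.dirname(serverBundlePath) === outputDir); // true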
@@ -22,6 +22,7 @@
     "start:blogOnly": "cross-env yarn start --config=docusaurus.config-blog-only.js",
     "build:blogOnly": "cross-env yarn build --config=docusaurus.config-blog-only.js",
     "build:fast": "cross-env BUILD_FAST=true yarn build --locale en",
+    "build:fast:profile": "cross-env BUILD_FAST=true node --cpu-prof --cpu-prof-dir .cpu-prof ./node_modules/.bin/docusaurus build --locale en",
     "netlify:build:production": "yarn docusaurus write-translations && yarn netlify:crowdin:delay && yarn netlify:crowdin:uploadSources && yarn netlify:crowdin:downloadTranslations && yarn build && yarn test:css-order",
     "netlify:build:branchDeploy": "yarn build && yarn test:css-order",
     "netlify:build:deployPreview": "yarn build && yarn test:css-order",