refactor(core): improve dev perf, fine-grained site reloads - part1 (#9903)

Commit d02b96f7f5 (parent eb6424cc27)
45 changed files with 2100 additions and 1332 deletions
@@ -7,7 +7,7 @@
 import {jest} from '@jest/globals';
 import path from 'path';
-import {loadContext} from '@docusaurus/core/src/server/index';
+import {loadContext} from '@docusaurus/core/src/server/site';
 import {createSlugger, posixPath, DEFAULT_PLUGIN_ID} from '@docusaurus/utils';
 import {createSidebarsUtils} from '../sidebars/utils';
 import {
@@ -12,9 +12,9 @@ import _ from 'lodash';
 import {isMatch} from 'picomatch';
 import commander from 'commander';
 import webpack from 'webpack';
-import {loadContext} from '@docusaurus/core/src/server/index';
+import {loadContext} from '@docusaurus/core/src/server/site';
 import {applyConfigureWebpack} from '@docusaurus/core/src/webpack/utils';
-import {sortConfig} from '@docusaurus/core/src/server/plugins/routeConfig';
+import {sortRoutes} from '@docusaurus/core/src/server/plugins/routeConfig';
 import {posixPath} from '@docusaurus/utils';
 import {normalizePluginOptions} from '@docusaurus/utils-validation';
@@ -109,7 +109,7 @@ Entries created:
     expectSnapshot: () => {
       // Sort the route config like in src/server/plugins/index.ts for
       // consistent snapshot ordering
-      sortConfig(routeConfigs);
+      sortRoutes(routeConfigs);
       expect(routeConfigs).not.toEqual([]);
       expect(routeConfigs).toMatchSnapshot('route config');
       expect(dataContainer).toMatchSnapshot('data');
@@ -6,7 +6,7 @@
  */

 import path from 'path';
-import {loadContext} from '@docusaurus/core/lib/server';
+import {loadContext} from '@docusaurus/core/src/server/site';
 import {normalizePluginOptions} from '@docusaurus/utils-validation';

 import pluginContentPages from '../index';
packages/docusaurus-types/src/plugin.d.ts (vendored, 9 changes)
@@ -163,6 +163,15 @@ export type Plugin<Content = unknown> = {
   }) => ThemeConfig;
 };

+/**
+ * Data required to uniquely identify a plugin
+ * The name or instance id alone is not enough
+ */
+export type PluginIdentifier = {
+  readonly name: string;
+  readonly id: string;
+};
+
 export type InitializedPlugin = Plugin & {
   readonly options: Required<PluginOptions>;
   readonly version: PluginVersionInformation;
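(Side note, not part of the diff: a site can run several instances of the same plugin, so neither field alone is unique. The helper below is a hypothetical sketch of looking a plugin up by this identifier, assuming the InitializedPlugin shape shown above.)

// Hypothetical helper: resolve an InitializedPlugin by its identifier.
function findPlugin(
  plugins: InitializedPlugin[],
  identifier: PluginIdentifier,
): InitializedPlugin | undefined {
  // Two docs-plugin instances can share the same name while having
  // different options.id values (e.g. 'default' and 'community').
  return plugins.find(
    (p) => p.name === identifier.name && p.options.id === identifier.id,
  );
}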
@@ -10,7 +10,7 @@ import path from 'path';
 import _ from 'lodash';
 import logger from '@docusaurus/logger';
 import {DOCUSAURUS_VERSION, mapAsyncSequential} from '@docusaurus/utils';
-import {load, loadContext, type LoadContextOptions} from '../server';
+import {loadSite, loadContext, type LoadContextParams} from '../server/site';
 import {handleBrokenLinks} from '../server/brokenLinks';

 import {createBuildClientConfig} from '../webpack/client';
@@ -32,7 +32,7 @@ import type {LoadedPlugin, Props} from '@docusaurus/types';
 import type {SiteCollectedData} from '../common';

 export type BuildCLIOptions = Pick<
-  LoadContextOptions,
+  LoadContextParams,
   'config' | 'locale' | 'outDir'
 > & {
   bundleAnalyzer?: boolean;
@@ -161,7 +161,7 @@ async function buildLocale({
   logger.info`name=${`[${locale}]`} Creating an optimized production build...`;

   PerfLogger.start('Loading site');
-  const props: Props = await load({
+  const site = await loadSite({
     siteDir,
     outDir: cliOptions.outDir,
     config: cliOptions.config,
@@ -170,7 +170,7 @@ async function buildLocale({
   });
   PerfLogger.end('Loading site');

   // Apply user webpack config.
+  const {props} = site;
   const {outDir, plugins} = props;

   // We can build the 2 configs in parallel
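(Illustration, inferred from the two hunks above rather than taken verbatim from the PR: loadSite() returns a Site object whose props field carries what load() used to return directly.)

// Old API (removed): props were returned directly.
// const props: Props = await load({siteDir, outDir, config, locale});

// New API (added): destructure props from the returned site.
const site = await loadSite({
  siteDir,
  outDir: cliOptions.outDir,
  config: cliOptions.config,
});
const {props} = site;
const {outDir, plugins} = props;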
@@ -11,11 +11,11 @@ import os from 'os';
 import logger from '@docusaurus/logger';
 import shell from 'shelljs';
 import {hasSSHProtocol, buildSshUrl, buildHttpsUrl} from '@docusaurus/utils';
-import {loadContext, type LoadContextOptions} from '../server';
+import {loadContext, type LoadContextParams} from '../server/site';
 import {build} from './build';

 export type DeployCLIOptions = Pick<
-  LoadContextOptions,
+  LoadContextParams,
   'config' | 'locale' | 'outDir'
 > & {
   skipBuild?: boolean;
@@ -6,7 +6,7 @@
  */

 import fs from 'fs-extra';
-import {loadContext} from '../server';
+import {loadContext} from '../server/site';
 import {initPlugins} from '../server/plugins/init';
 import type {CommanderStatic} from 'commander';
@@ -15,10 +15,10 @@ import openBrowser from 'react-dev-utils/openBrowser';
 import {loadSiteConfig} from '../server/config';
 import {build} from './build';
 import {getHostPort, type HostPortOptions} from '../server/getHostPort';
-import type {LoadContextOptions} from '../server';
+import type {LoadContextParams} from '../server/site';

 export type ServeCLIOptions = HostPortOptions &
-  Pick<LoadContextOptions, 'config'> & {
+  Pick<LoadContextParams, 'config'> & {
     dir?: string;
     build?: boolean;
     open?: boolean;
@ -1,321 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import path from 'path';
|
||||
import _ from 'lodash';
|
||||
import logger from '@docusaurus/logger';
|
||||
import {normalizeUrl, posixPath} from '@docusaurus/utils';
|
||||
import chokidar from 'chokidar';
|
||||
import openBrowser from 'react-dev-utils/openBrowser';
|
||||
import {prepareUrls} from 'react-dev-utils/WebpackDevServerUtils';
|
||||
import evalSourceMapMiddleware from 'react-dev-utils/evalSourceMapMiddleware';
|
||||
import webpack from 'webpack';
|
||||
import WebpackDevServer from 'webpack-dev-server';
|
||||
import merge from 'webpack-merge';
|
||||
import {load, type LoadContextOptions} from '../server';
|
||||
import {createStartClientConfig} from '../webpack/client';
|
||||
import {
|
||||
getHttpsConfig,
|
||||
formatStatsErrorMessage,
|
||||
printStatsWarnings,
|
||||
executePluginsConfigurePostCss,
|
||||
executePluginsConfigureWebpack,
|
||||
} from '../webpack/utils';
|
||||
import {getHostPort, type HostPortOptions} from '../server/getHostPort';
|
||||
import {PerfLogger} from '../utils';
|
||||
import type {Compiler} from 'webpack';
|
||||
import type {Props} from '@docusaurus/types';
|
||||
|
||||
export type StartCLIOptions = HostPortOptions &
|
||||
Pick<LoadContextOptions, 'locale' | 'config'> & {
|
||||
hotOnly?: boolean;
|
||||
open?: boolean;
|
||||
poll?: boolean | number;
|
||||
minify?: boolean;
|
||||
};
|
||||
|
||||
export async function start(
|
||||
siteDirParam: string = '.',
|
||||
cliOptions: Partial<StartCLIOptions> = {},
|
||||
): Promise<void> {
|
||||
// Temporary workaround to unlock the ability to translate the site config
|
||||
// We'll remove it if a better official API can be designed
|
||||
// See https://github.com/facebook/docusaurus/issues/4542
|
||||
process.env.DOCUSAURUS_CURRENT_LOCALE = cliOptions.locale;
|
||||
|
||||
const siteDir = await fs.realpath(siteDirParam);
|
||||
|
||||
logger.info('Starting the development server...');
|
||||
|
||||
async function loadSite() {
|
||||
PerfLogger.start('Loading site');
|
||||
const result = await load({
|
||||
siteDir,
|
||||
config: cliOptions.config,
|
||||
locale: cliOptions.locale,
|
||||
localizePath: undefined, // Should this be configurable?
|
||||
});
|
||||
PerfLogger.end('Loading site');
|
||||
return result;
|
||||
}
|
||||
|
||||
// Process all related files as a prop.
|
||||
const props = await loadSite();
|
||||
|
||||
const {host, port, getOpenUrl} = await createUrlUtils({cliOptions});
|
||||
const openUrl = getOpenUrl({baseUrl: props.baseUrl});
|
||||
|
||||
logger.success`Docusaurus website is running at: url=${openUrl}`;
|
||||
|
||||
// Reload files processing.
|
||||
const reload = _.debounce(() => {
|
||||
loadSite()
|
||||
.then(({baseUrl: newBaseUrl}) => {
|
||||
const newOpenUrl = getOpenUrl({baseUrl: newBaseUrl});
|
||||
if (newOpenUrl !== openUrl) {
|
||||
logger.success`Docusaurus website is running at: url=${newOpenUrl}`;
|
||||
}
|
||||
})
|
||||
.catch((err: Error) => {
|
||||
logger.error(err.stack);
|
||||
});
|
||||
}, 500);
|
||||
|
||||
// TODO this is historically not optimized!
|
||||
// When any site file changes, we reload absolutely everything :/
|
||||
// At least we should try to reload only one plugin individually?
|
||||
setupFileWatchers({
|
||||
props,
|
||||
cliOptions,
|
||||
onFileChange: () => {
|
||||
reload();
|
||||
},
|
||||
});
|
||||
|
||||
const config = await getStartClientConfig({
|
||||
props,
|
||||
minify: cliOptions.minify ?? true,
|
||||
poll: cliOptions.poll,
|
||||
});
|
||||
|
||||
const compiler = webpack(config);
|
||||
registerE2ETestHook(compiler);
|
||||
|
||||
const defaultDevServerConfig = await createDevServerConfig({
|
||||
cliOptions,
|
||||
props,
|
||||
host,
|
||||
port,
|
||||
});
|
||||
|
||||
// Allow plugin authors to customize/override devServer config
|
||||
const devServerConfig: WebpackDevServer.Configuration = merge(
|
||||
[defaultDevServerConfig, config.devServer].filter(Boolean),
|
||||
);
|
||||
|
||||
const devServer = new WebpackDevServer(devServerConfig, compiler);
|
||||
devServer.startCallback(() => {
|
||||
if (cliOptions.open) {
|
||||
openBrowser(openUrl);
|
||||
}
|
||||
});
|
||||
|
||||
['SIGINT', 'SIGTERM'].forEach((sig) => {
|
||||
process.on(sig, () => {
|
||||
devServer.stop();
|
||||
process.exit();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function createPollingOptions({cliOptions}: {cliOptions: StartCLIOptions}) {
|
||||
return {
|
||||
usePolling: !!cliOptions.poll,
|
||||
interval: Number.isInteger(cliOptions.poll)
|
||||
? (cliOptions.poll as number)
|
||||
: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
function setupFileWatchers({
|
||||
props,
|
||||
cliOptions,
|
||||
onFileChange,
|
||||
}: {
|
||||
props: Props;
|
||||
cliOptions: StartCLIOptions;
|
||||
onFileChange: () => void;
|
||||
}) {
|
||||
const {siteDir} = props;
|
||||
const pathsToWatch = getPathsToWatch({props});
|
||||
|
||||
const pollingOptions = createPollingOptions({cliOptions});
|
||||
const fsWatcher = chokidar.watch(pathsToWatch, {
|
||||
cwd: siteDir,
|
||||
ignoreInitial: true,
|
||||
...{pollingOptions},
|
||||
});
|
||||
|
||||
['add', 'change', 'unlink', 'addDir', 'unlinkDir'].forEach((event) =>
|
||||
fsWatcher.on(event, onFileChange),
|
||||
);
|
||||
}
|
||||
|
||||
function getPathsToWatch({props}: {props: Props}): string[] {
|
||||
const {siteDir, siteConfigPath, plugins, localizationDir} = props;
|
||||
|
||||
const normalizeToSiteDir = (filepath: string) => {
|
||||
if (filepath && path.isAbsolute(filepath)) {
|
||||
return posixPath(path.relative(siteDir, filepath));
|
||||
}
|
||||
return posixPath(filepath);
|
||||
};
|
||||
|
||||
const pluginsPaths = plugins
|
||||
.flatMap((plugin) => plugin.getPathsToWatch?.() ?? [])
|
||||
.filter(Boolean)
|
||||
.map(normalizeToSiteDir);
|
||||
|
||||
return [...pluginsPaths, siteConfigPath, localizationDir];
|
||||
}
|
||||
|
||||
async function createUrlUtils({cliOptions}: {cliOptions: StartCLIOptions}) {
|
||||
const protocol: string = process.env.HTTPS === 'true' ? 'https' : 'http';
|
||||
|
||||
const {host, port} = await getHostPort(cliOptions);
|
||||
if (port === null) {
|
||||
return process.exit();
|
||||
}
|
||||
|
||||
const getOpenUrl = ({baseUrl}: {baseUrl: string}) => {
|
||||
const urls = prepareUrls(protocol, host, port);
|
||||
return normalizeUrl([urls.localUrlForBrowser, baseUrl]);
|
||||
};
|
||||
|
||||
return {host, port, getOpenUrl};
|
||||
}
|
||||
|
||||
async function createDevServerConfig({
|
||||
cliOptions,
|
||||
props,
|
||||
host,
|
||||
port,
|
||||
}: {
|
||||
cliOptions: StartCLIOptions;
|
||||
props: Props;
|
||||
host: string;
|
||||
port: number;
|
||||
}): Promise<WebpackDevServer.Configuration> {
|
||||
const {baseUrl, siteDir, siteConfig} = props;
|
||||
|
||||
const pollingOptions = createPollingOptions({cliOptions});
|
||||
|
||||
const httpsConfig = await getHttpsConfig();
|
||||
|
||||
// https://webpack.js.org/configuration/dev-server
|
||||
return {
|
||||
hot: cliOptions.hotOnly ? 'only' : true,
|
||||
liveReload: false,
|
||||
client: {
|
||||
progress: true,
|
||||
overlay: {
|
||||
warnings: false,
|
||||
errors: true,
|
||||
},
|
||||
webSocketURL: {
|
||||
hostname: '0.0.0.0',
|
||||
port: 0,
|
||||
},
|
||||
},
|
||||
headers: {
|
||||
'access-control-allow-origin': '*',
|
||||
},
|
||||
devMiddleware: {
|
||||
publicPath: baseUrl,
|
||||
// Reduce log verbosity, see https://github.com/facebook/docusaurus/pull/5420#issuecomment-906613105
|
||||
stats: 'summary',
|
||||
},
|
||||
static: siteConfig.staticDirectories.map((dir) => ({
|
||||
publicPath: baseUrl,
|
||||
directory: path.resolve(siteDir, dir),
|
||||
watch: {
|
||||
// Useful options for our own monorepo using symlinks!
|
||||
// See https://github.com/webpack/webpack/issues/11612#issuecomment-879259806
|
||||
followSymlinks: true,
|
||||
ignored: /node_modules\/(?!@docusaurus)/,
|
||||
...{pollingOptions},
|
||||
},
|
||||
})),
|
||||
...(httpsConfig && {
|
||||
server:
|
||||
typeof httpsConfig === 'object'
|
||||
? {
|
||||
type: 'https',
|
||||
options: httpsConfig,
|
||||
}
|
||||
: 'https',
|
||||
}),
|
||||
historyApiFallback: {
|
||||
rewrites: [{from: /\/*/, to: baseUrl}],
|
||||
},
|
||||
allowedHosts: 'all',
|
||||
host,
|
||||
port,
|
||||
setupMiddlewares: (middlewares, devServer) => {
|
||||
// This lets us fetch source contents from webpack for the error overlay.
|
||||
middlewares.unshift(evalSourceMapMiddleware(devServer));
|
||||
return middlewares;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// E2E_TEST=true docusaurus start
|
||||
// Makes "docusaurus start" exit immediately on success/error, for E2E test
|
||||
function registerE2ETestHook(compiler: Compiler) {
|
||||
compiler.hooks.done.tap('done', (stats) => {
|
||||
const errorsWarnings = stats.toJson('errors-warnings');
|
||||
const statsErrorMessage = formatStatsErrorMessage(errorsWarnings);
|
||||
if (statsErrorMessage) {
|
||||
console.error(statsErrorMessage);
|
||||
}
|
||||
printStatsWarnings(errorsWarnings);
|
||||
if (process.env.E2E_TEST) {
|
||||
if (stats.hasErrors()) {
|
||||
logger.error('E2E_TEST: Project has compiler errors.');
|
||||
process.exit(1);
|
||||
}
|
||||
logger.success('E2E_TEST: Project can compile.');
|
||||
process.exit(0);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function getStartClientConfig({
|
||||
props,
|
||||
minify,
|
||||
poll,
|
||||
}: {
|
||||
props: Props;
|
||||
minify: boolean;
|
||||
poll: number | boolean | undefined;
|
||||
}) {
|
||||
const {plugins, siteConfig} = props;
|
||||
let {clientConfig: config} = await createStartClientConfig({
|
||||
props,
|
||||
minify,
|
||||
poll,
|
||||
});
|
||||
config = executePluginsConfigurePostCss({plugins, config});
|
||||
config = executePluginsConfigureWebpack({
|
||||
plugins,
|
||||
config,
|
||||
isServer: false,
|
||||
jsLoader: siteConfig.webpack?.jsLoader,
|
||||
});
|
||||
return config;
|
||||
}
|
packages/docusaurus/src/commands/start/start.ts (new file, 64 lines)
@@ -0,0 +1,64 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import logger from '@docusaurus/logger';
|
||||
import openBrowser from 'react-dev-utils/openBrowser';
|
||||
import {setupSiteFileWatchers} from './watcher';
|
||||
import {createWebpackDevServer} from './webpack';
|
||||
import {createReloadableSite} from './utils';
|
||||
import type {LoadContextParams} from '../../server/site';
|
||||
import type {HostPortOptions} from '../../server/getHostPort';
|
||||
|
||||
export type StartCLIOptions = HostPortOptions &
|
||||
Pick<LoadContextParams, 'locale' | 'config'> & {
|
||||
hotOnly?: boolean;
|
||||
open?: boolean;
|
||||
poll?: boolean | number;
|
||||
minify?: boolean;
|
||||
};
|
||||
|
||||
export async function start(
|
||||
siteDirParam: string = '.',
|
||||
cliOptions: Partial<StartCLIOptions> = {},
|
||||
): Promise<void> {
|
||||
logger.info('Starting the development server...');
|
||||
// Temporary workaround to unlock the ability to translate the site config
|
||||
// We'll remove it if a better official API can be designed
|
||||
// See https://github.com/facebook/docusaurus/issues/4542
|
||||
process.env.DOCUSAURUS_CURRENT_LOCALE = cliOptions.locale;
|
||||
|
||||
const reloadableSite = await createReloadableSite({siteDirParam, cliOptions});
|
||||
|
||||
setupSiteFileWatchers(
|
||||
{props: reloadableSite.get().props, cliOptions},
|
||||
({plugin}) => {
|
||||
if (plugin) {
|
||||
reloadableSite.reloadPlugin(plugin);
|
||||
} else {
|
||||
reloadableSite.reload();
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
const devServer = await createWebpackDevServer({
|
||||
props: reloadableSite.get().props,
|
||||
cliOptions,
|
||||
openUrlContext: reloadableSite.openUrlContext,
|
||||
});
|
||||
|
||||
['SIGINT', 'SIGTERM'].forEach((sig) => {
|
||||
process.on(sig, () => {
|
||||
devServer.stop();
|
||||
process.exit();
|
||||
});
|
||||
});
|
||||
|
||||
await devServer.start();
|
||||
if (cliOptions.open) {
|
||||
openBrowser(reloadableSite.getOpenUrl());
|
||||
}
|
||||
}
|
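(A minimal usage sketch for the new entry point, assuming the command is invoked programmatically the same way the CLI does. The option names come from StartCLIOptions above; treat the import path as an assumption.)

// Hypothetical programmatic invocation of the start command.
import {start} from '@docusaurus/core/lib/index';

await start('.', {
  open: true, // open the browser once the dev server is up
  minify: false, // serve unminified bundles while debugging
});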
packages/docusaurus/src/commands/start/utils.ts (new file, 126 lines)
@@ -0,0 +1,126 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import _ from 'lodash';
|
||||
import {prepareUrls} from 'react-dev-utils/WebpackDevServerUtils';
|
||||
import {normalizeUrl} from '@docusaurus/utils';
|
||||
import logger from '@docusaurus/logger';
|
||||
import {getHostPort} from '../../server/getHostPort';
|
||||
import {PerfLogger} from '../../utils';
|
||||
import {
|
||||
loadSite,
|
||||
type LoadSiteParams,
|
||||
reloadSite,
|
||||
reloadSitePlugin,
|
||||
} from '../../server/site';
|
||||
import type {StartCLIOptions} from './start';
|
||||
import type {LoadedPlugin} from '@docusaurus/types';
|
||||
|
||||
export type OpenUrlContext = {
|
||||
host: string;
|
||||
port: number;
|
||||
getOpenUrl: ({baseUrl}: {baseUrl: string}) => string;
|
||||
};
|
||||
|
||||
export async function createOpenUrlContext({
|
||||
cliOptions,
|
||||
}: {
|
||||
cliOptions: StartCLIOptions;
|
||||
}): Promise<OpenUrlContext> {
|
||||
const protocol: string = process.env.HTTPS === 'true' ? 'https' : 'http';
|
||||
|
||||
const {host, port} = await getHostPort(cliOptions);
|
||||
if (port === null) {
|
||||
return process.exit();
|
||||
}
|
||||
|
||||
const getOpenUrl: OpenUrlContext['getOpenUrl'] = ({baseUrl}) => {
|
||||
const urls = prepareUrls(protocol, host, port);
|
||||
return normalizeUrl([urls.localUrlForBrowser, baseUrl]);
|
||||
};
|
||||
|
||||
return {host, port, getOpenUrl};
|
||||
}
|
||||
|
||||
type StartParams = {
|
||||
siteDirParam: string;
|
||||
cliOptions: Partial<StartCLIOptions>;
|
||||
};
|
||||
|
||||
async function createLoadSiteParams({
|
||||
siteDirParam,
|
||||
cliOptions,
|
||||
}: StartParams): Promise<LoadSiteParams> {
|
||||
const siteDir = await fs.realpath(siteDirParam);
|
||||
return {
|
||||
siteDir,
|
||||
config: cliOptions.config,
|
||||
locale: cliOptions.locale,
|
||||
localizePath: undefined, // Should this be configurable?
|
||||
};
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
|
||||
export async function createReloadableSite(startParams: StartParams) {
|
||||
const openUrlContext = await createOpenUrlContext(startParams);
|
||||
|
||||
let site = await PerfLogger.async('Loading site', async () => {
|
||||
const params = await createLoadSiteParams(startParams);
|
||||
return loadSite(params);
|
||||
});
|
||||
|
||||
const get = () => site;
|
||||
|
||||
const getOpenUrl = () =>
|
||||
openUrlContext.getOpenUrl({
|
||||
baseUrl: site.props.baseUrl,
|
||||
});
|
||||
|
||||
const printOpenUrlMessage = () => {
|
||||
logger.success`Docusaurus website is running at: url=${getOpenUrl()}`;
|
||||
};
|
||||
printOpenUrlMessage();
|
||||
|
||||
const reloadBase = async () => {
|
||||
try {
|
||||
const oldSite = site;
|
||||
site = await PerfLogger.async('Reloading site', () => reloadSite(site));
|
||||
if (oldSite.props.baseUrl !== site.props.baseUrl) {
|
||||
printOpenUrlMessage();
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error('Site reload failure');
|
||||
console.error(e);
|
||||
}
|
||||
};
|
||||
|
||||
// TODO instead of debouncing we should rather add AbortController support?
|
||||
const reload = _.debounce(reloadBase, 500);
|
||||
|
||||
// TODO this could be subject to plugin reloads race conditions
|
||||
// In practice, it is not likely the user will hot reload 2 plugins at once
|
||||
// but we should still support it and probably use a task queuing system
|
||||
const reloadPlugin = async (plugin: LoadedPlugin) => {
|
||||
try {
|
||||
site = await PerfLogger.async(
|
||||
`Reloading site plugin ${plugin.name}@${plugin.options.id}`,
|
||||
() => {
|
||||
const pluginIdentifier = {name: plugin.name, id: plugin.options.id};
|
||||
return reloadSitePlugin(site, pluginIdentifier);
|
||||
},
|
||||
);
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
`Site plugin reload failure - Plugin ${plugin.name}@${plugin.options.id}`,
|
||||
);
|
||||
console.error(e);
|
||||
}
|
||||
};
|
||||
|
||||
return {get, getOpenUrl, reload, reloadPlugin, openUrlContext};
|
||||
}
|
packages/docusaurus/src/commands/start/watcher.ts (new file, 135 lines)
@@ -0,0 +1,135 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import path from 'path';
|
||||
import chokidar from 'chokidar';
|
||||
import {posixPath} from '@docusaurus/utils';
|
||||
import type {StartCLIOptions} from './start';
|
||||
import type {LoadedPlugin, Props} from '@docusaurus/types';
|
||||
|
||||
type PollingOptions = {
|
||||
usePolling: boolean;
|
||||
interval: number | undefined;
|
||||
};
|
||||
|
||||
export function createPollingOptions(
|
||||
cliOptions: StartCLIOptions,
|
||||
): PollingOptions {
|
||||
return {
|
||||
usePolling: !!cliOptions.poll,
|
||||
interval: Number.isInteger(cliOptions.poll)
|
||||
? (cliOptions.poll as number)
|
||||
: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
export type FileWatchEventName =
|
||||
| 'add'
|
||||
| 'addDir'
|
||||
| 'change'
|
||||
| 'unlink'
|
||||
| 'unlinkDir';
|
||||
|
||||
export type FileWatchEvent = {
|
||||
name: FileWatchEventName;
|
||||
path: string;
|
||||
};
|
||||
|
||||
type WatchParams = {
|
||||
pathsToWatch: string[];
|
||||
siteDir: string;
|
||||
} & PollingOptions;
|
||||
|
||||
/**
|
||||
* Watch file system paths for changes and emit events
|
||||
* Returns an async handle to stop watching
|
||||
*/
|
||||
export function watch(
|
||||
params: WatchParams,
|
||||
callback: (event: FileWatchEvent) => void,
|
||||
): () => Promise<void> {
|
||||
const {pathsToWatch, siteDir, ...options} = params;
|
||||
|
||||
const fsWatcher = chokidar.watch(pathsToWatch, {
|
||||
cwd: siteDir,
|
||||
ignoreInitial: true,
|
||||
...options,
|
||||
});
|
||||
|
||||
fsWatcher.on('all', (name, eventPath) => callback({name, path: eventPath}));
|
||||
|
||||
return () => fsWatcher.close();
|
||||
}
|
||||
|
||||
export function getSitePathsToWatch({props}: {props: Props}): string[] {
|
||||
return [
|
||||
// TODO we should also watch all imported modules!
|
||||
// Use https://github.com/vercel/nft ?
|
||||
props.siteConfigPath,
|
||||
props.localizationDir,
|
||||
];
|
||||
}
|
||||
|
||||
export function getPluginPathsToWatch({
|
||||
siteDir,
|
||||
plugin,
|
||||
}: {
|
||||
siteDir: string;
|
||||
plugin: LoadedPlugin;
|
||||
}): string[] {
|
||||
const normalizeToSiteDir = (filepath: string) => {
|
||||
if (filepath && path.isAbsolute(filepath)) {
|
||||
return posixPath(path.relative(siteDir, filepath));
|
||||
}
|
||||
return posixPath(filepath);
|
||||
};
|
||||
|
||||
return (plugin.getPathsToWatch?.() ?? [])
|
||||
.filter(Boolean)
|
||||
.map(normalizeToSiteDir);
|
||||
}
|
||||
|
||||
export function setupSiteFileWatchers(
|
||||
{
|
||||
props,
|
||||
cliOptions,
|
||||
}: {
|
||||
props: Props;
|
||||
cliOptions: StartCLIOptions;
|
||||
},
|
||||
callback: (params: {
|
||||
plugin: LoadedPlugin | null;
|
||||
event: FileWatchEvent;
|
||||
}) => void,
|
||||
): void {
|
||||
const {siteDir} = props;
|
||||
const pollingOptions = createPollingOptions(cliOptions);
|
||||
|
||||
// TODO on config / or local plugin updates,
|
||||
// the getFilePathsToWatch lifecycle code might get updated
|
||||
// so we should probably reset the watchers?
|
||||
|
||||
watch(
|
||||
{
|
||||
pathsToWatch: getSitePathsToWatch({props}),
|
||||
siteDir: props.siteDir,
|
||||
...pollingOptions,
|
||||
},
|
||||
(event) => callback({plugin: null, event}),
|
||||
);
|
||||
|
||||
props.plugins.forEach((plugin) => {
|
||||
watch(
|
||||
{
|
||||
pathsToWatch: getPluginPathsToWatch({plugin, siteDir}),
|
||||
siteDir,
|
||||
...pollingOptions,
|
||||
},
|
||||
(event) => callback({plugin, event}),
|
||||
);
|
||||
});
|
||||
}
|
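(The watch() helper above returns an async disposer instead of exposing the chokidar instance. A short usage sketch; the paths and siteDir are made-up example values.)

// Illustrative only: watch two paths relative to the site dir and log events.
const stopWatching = watch(
  {
    pathsToWatch: ['docusaurus.config.js', 'i18n'],
    siteDir: '/path/to/site',
    usePolling: false,
    interval: undefined,
  },
  (event) => console.log(`${event.name}: ${event.path}`),
);

// Later, e.g. on shutdown:
await stopWatching();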
packages/docusaurus/src/commands/start/webpack.ts (new file, 179 lines)
@@ -0,0 +1,179 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import path from 'path';
|
||||
import merge from 'webpack-merge';
|
||||
import webpack from 'webpack';
|
||||
import logger from '@docusaurus/logger';
|
||||
import WebpackDevServer from 'webpack-dev-server';
|
||||
import evalSourceMapMiddleware from 'react-dev-utils/evalSourceMapMiddleware';
|
||||
import {createPollingOptions} from './watcher';
|
||||
import {
|
||||
executePluginsConfigurePostCss,
|
||||
executePluginsConfigureWebpack,
|
||||
formatStatsErrorMessage,
|
||||
getHttpsConfig,
|
||||
printStatsWarnings,
|
||||
} from '../../webpack/utils';
|
||||
import {createStartClientConfig} from '../../webpack/client';
|
||||
import type {StartCLIOptions} from './start';
|
||||
import type {Props} from '@docusaurus/types';
|
||||
import type {Compiler} from 'webpack';
|
||||
import type {OpenUrlContext} from './utils';
|
||||
|
||||
// E2E_TEST=true docusaurus start
|
||||
// Makes "docusaurus start" exit immediately on success/error, for E2E test
|
||||
function registerWebpackE2ETestHook(compiler: Compiler) {
|
||||
compiler.hooks.done.tap('done', (stats) => {
|
||||
const errorsWarnings = stats.toJson('errors-warnings');
|
||||
const statsErrorMessage = formatStatsErrorMessage(errorsWarnings);
|
||||
if (statsErrorMessage) {
|
||||
console.error(statsErrorMessage);
|
||||
}
|
||||
printStatsWarnings(errorsWarnings);
|
||||
if (process.env.E2E_TEST) {
|
||||
if (stats.hasErrors()) {
|
||||
logger.error('E2E_TEST: Project has compiler errors.');
|
||||
process.exit(1);
|
||||
}
|
||||
logger.success('E2E_TEST: Project can compile.');
|
||||
process.exit(0);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function createDevServerConfig({
|
||||
cliOptions,
|
||||
props,
|
||||
host,
|
||||
port,
|
||||
}: {
|
||||
cliOptions: StartCLIOptions;
|
||||
props: Props;
|
||||
host: string;
|
||||
port: number;
|
||||
}): Promise<WebpackDevServer.Configuration> {
|
||||
const {baseUrl, siteDir, siteConfig} = props;
|
||||
|
||||
const pollingOptions = createPollingOptions(cliOptions);
|
||||
|
||||
const httpsConfig = await getHttpsConfig();
|
||||
|
||||
// https://webpack.js.org/configuration/dev-server
|
||||
return {
|
||||
hot: cliOptions.hotOnly ? 'only' : true,
|
||||
liveReload: false,
|
||||
client: {
|
||||
progress: true,
|
||||
overlay: {
|
||||
warnings: false,
|
||||
errors: true,
|
||||
},
|
||||
webSocketURL: {
|
||||
hostname: '0.0.0.0',
|
||||
port: 0,
|
||||
},
|
||||
},
|
||||
headers: {
|
||||
'access-control-allow-origin': '*',
|
||||
},
|
||||
devMiddleware: {
|
||||
publicPath: baseUrl,
|
||||
// Reduce log verbosity, see https://github.com/facebook/docusaurus/pull/5420#issuecomment-906613105
|
||||
stats: 'summary',
|
||||
},
|
||||
static: siteConfig.staticDirectories.map((dir) => ({
|
||||
publicPath: baseUrl,
|
||||
directory: path.resolve(siteDir, dir),
|
||||
watch: {
|
||||
// Useful options for our own monorepo using symlinks!
|
||||
// See https://github.com/webpack/webpack/issues/11612#issuecomment-879259806
|
||||
followSymlinks: true,
|
||||
ignored: /node_modules\/(?!@docusaurus)/,
|
||||
...{pollingOptions},
|
||||
},
|
||||
})),
|
||||
...(httpsConfig && {
|
||||
server:
|
||||
typeof httpsConfig === 'object'
|
||||
? {
|
||||
type: 'https',
|
||||
options: httpsConfig,
|
||||
}
|
||||
: 'https',
|
||||
}),
|
||||
historyApiFallback: {
|
||||
rewrites: [{from: /\/*/, to: baseUrl}],
|
||||
},
|
||||
allowedHosts: 'all',
|
||||
host,
|
||||
port,
|
||||
setupMiddlewares: (middlewares, devServer) => {
|
||||
// This lets us fetch source contents from webpack for the error overlay.
|
||||
middlewares.unshift(evalSourceMapMiddleware(devServer));
|
||||
return middlewares;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function getStartClientConfig({
|
||||
props,
|
||||
minify,
|
||||
poll,
|
||||
}: {
|
||||
props: Props;
|
||||
minify: boolean;
|
||||
poll: number | boolean | undefined;
|
||||
}) {
|
||||
const {plugins, siteConfig} = props;
|
||||
let {clientConfig: config} = await createStartClientConfig({
|
||||
props,
|
||||
minify,
|
||||
poll,
|
||||
});
|
||||
config = executePluginsConfigurePostCss({plugins, config});
|
||||
config = executePluginsConfigureWebpack({
|
||||
plugins,
|
||||
config,
|
||||
isServer: false,
|
||||
jsLoader: siteConfig.webpack?.jsLoader,
|
||||
});
|
||||
return config;
|
||||
}
|
||||
|
||||
export async function createWebpackDevServer({
|
||||
props,
|
||||
cliOptions,
|
||||
openUrlContext,
|
||||
}: {
|
||||
props: Props;
|
||||
cliOptions: StartCLIOptions;
|
||||
openUrlContext: OpenUrlContext;
|
||||
}): Promise<WebpackDevServer> {
|
||||
const config = await getStartClientConfig({
|
||||
props,
|
||||
minify: cliOptions.minify ?? true,
|
||||
poll: cliOptions.poll,
|
||||
});
|
||||
|
||||
const compiler = webpack(config);
|
||||
registerWebpackE2ETestHook(compiler);
|
||||
|
||||
const defaultDevServerConfig = await createDevServerConfig({
|
||||
cliOptions,
|
||||
props,
|
||||
host: openUrlContext.host,
|
||||
port: openUrlContext.port,
|
||||
});
|
||||
|
||||
// Allow plugin authors to customize/override devServer config
|
||||
const devServerConfig: WebpackDevServer.Configuration = merge(
|
||||
[defaultDevServerConfig, config.devServer].filter(Boolean),
|
||||
);
|
||||
|
||||
return new WebpackDevServer(devServerConfig, compiler);
|
||||
}
|
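(Because createWebpackDevServer() merges config.devServer on top of the defaults via [defaultDevServerConfig, config.devServer].filter(Boolean), a plugin can adjust dev-server behavior from configureWebpack. A hedged sketch of such a plugin; the option values are illustrative, not recommendations.)

// Hypothetical plugin overriding part of the dev server config.
import type {Plugin} from '@docusaurus/types';

export default function devServerTweaksPlugin(): Plugin {
  return {
    name: 'dev-server-tweaks',
    configureWebpack() {
      return {
        devServer: {
          // Merged over the defaults shown in createDevServerConfig() above.
          client: {overlay: {warnings: true, errors: true}},
        },
      };
    },
  };
}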
|
@@ -5,7 +5,7 @@
  * LICENSE file in the root directory of this source tree.
  */

-import {loadContext} from '../../server';
+import {loadContext} from '../../server/site';
 import {initPlugins} from '../../server/plugins/init';
 import {loadPluginConfigs} from '../../server/plugins/configs';
 import type {SwizzleCLIOptions, SwizzleContext} from './common';
@@ -11,7 +11,7 @@ import {
   writeMarkdownHeadingId,
   type WriteHeadingIDOptions,
 } from '@docusaurus/utils';
-import {loadContext} from '../server';
+import {loadContext} from '../server/site';
 import {initPlugins} from '../server/plugins/init';
 import {safeGlobby} from '../server/utils';
@@ -7,7 +7,7 @@

 import fs from 'fs-extra';
 import path from 'path';
-import {loadContext, type LoadContextOptions} from '../server';
+import {loadContext, type LoadContextParams} from '../server/site';
 import {initPlugins} from '../server/plugins/init';
 import {
   writePluginTranslations,
@@ -24,7 +24,7 @@ import {getCustomBabelConfigFilePath, getBabelOptions} from '../webpack/utils';
 import type {InitializedPlugin} from '@docusaurus/types';

 export type WriteTranslationsCLIOptions = Pick<
-  LoadContextOptions,
+  LoadContextParams,
   'config' | 'locale'
 > &
   WriteTranslationsOptions;
@@ -10,7 +10,7 @@ export {clear} from './commands/clear';
 export {deploy} from './commands/deploy';
 export {externalCommand} from './commands/external';
 export {serve} from './commands/serve';
-export {start} from './commands/start';
+export {start} from './commands/start/start';
 export {swizzle} from './commands/swizzle';
 export {writeHeadingIds} from './commands/writeHeadingIds';
 export {writeTranslations} from './commands/writeTranslations';
@ -6,89 +6,30 @@
|
|||
*/
|
||||
|
||||
import {jest} from '@jest/globals';
|
||||
import {loadRoutes, handleDuplicateRoutes, genChunkName} from '../routes';
|
||||
import {getAllFinalRoutes, handleDuplicateRoutes} from '../routes';
|
||||
import type {RouteConfig} from '@docusaurus/types';
|
||||
|
||||
describe('genChunkName', () => {
|
||||
it('works', () => {
|
||||
const firstAssert: {[key: string]: string} = {
|
||||
'/docs/adding-blog': 'docs-adding-blog-062',
|
||||
'/docs/versioning': 'docs-versioning-8a8',
|
||||
'/': 'index',
|
||||
'/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus':
|
||||
'blog-2018-04-30-how-i-converted-profilo-to-docusaurus-4f2',
|
||||
'/youtube': 'youtube-429',
|
||||
'/users/en/': 'users-en-f7a',
|
||||
'/blog': 'blog-c06',
|
||||
};
|
||||
Object.keys(firstAssert).forEach((str) => {
|
||||
expect(genChunkName(str)).toBe(firstAssert[str]);
|
||||
});
|
||||
});
|
||||
|
||||
it("doesn't allow different chunk name for same path", () => {
|
||||
expect(genChunkName('path/is/similar', 'oldPrefix')).toEqual(
|
||||
genChunkName('path/is/similar', 'newPrefix'),
|
||||
);
|
||||
});
|
||||
|
||||
it('emits different chunk names for different paths even with same preferred name', () => {
|
||||
const secondAssert: {[key: string]: string} = {
|
||||
'/blog/1': 'blog-85-f-089',
|
||||
'/blog/2': 'blog-353-489',
|
||||
};
|
||||
Object.keys(secondAssert).forEach((str) => {
|
||||
expect(genChunkName(str, undefined, 'blog')).toBe(secondAssert[str]);
|
||||
});
|
||||
});
|
||||
|
||||
it('only generates short unique IDs', () => {
|
||||
const thirdAssert: {[key: string]: string} = {
|
||||
a: '0cc175b9',
|
||||
b: '92eb5ffe',
|
||||
c: '4a8a08f0',
|
||||
d: '8277e091',
|
||||
};
|
||||
Object.keys(thirdAssert).forEach((str) => {
|
||||
expect(genChunkName(str, undefined, undefined, true)).toBe(
|
||||
thirdAssert[str],
|
||||
);
|
||||
});
|
||||
expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091');
|
||||
});
|
||||
|
||||
// https://github.com/facebook/docusaurus/issues/8536
|
||||
it('avoids hash collisions', () => {
|
||||
expect(
|
||||
genChunkName(
|
||||
'@site/blog/2022-11-18-bye-medium/index.mdx?truncated=true',
|
||||
'content',
|
||||
'blog',
|
||||
false,
|
||||
),
|
||||
).not.toBe(
|
||||
genChunkName(
|
||||
'@site/blog/2019-10-05-react-nfc/index.mdx?truncated=true',
|
||||
'content',
|
||||
'blog',
|
||||
false,
|
||||
),
|
||||
);
|
||||
expect(
|
||||
genChunkName(
|
||||
'@site/blog/2022-11-18-bye-medium/index.mdx?truncated=true',
|
||||
'content',
|
||||
'blog',
|
||||
true,
|
||||
),
|
||||
).not.toBe(
|
||||
genChunkName(
|
||||
'@site/blog/2019-10-05-react-nfc/index.mdx?truncated=true',
|
||||
'content',
|
||||
'blog',
|
||||
true,
|
||||
),
|
||||
);
|
||||
describe('getAllFinalRoutes', () => {
|
||||
it('gets final routes correctly', () => {
|
||||
const routes: RouteConfig[] = [
|
||||
{
|
||||
path: '/docs',
|
||||
component: '',
|
||||
routes: [
|
||||
{path: '/docs/someDoc', component: ''},
|
||||
{path: '/docs/someOtherDoc', component: ''},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: '/community',
|
||||
component: '',
|
||||
},
|
||||
];
|
||||
expect(getAllFinalRoutes(routes)).toEqual([
|
||||
routes[0]!.routes![0],
|
||||
routes[0]!.routes![1],
|
||||
routes[1],
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -127,117 +68,16 @@ describe('handleDuplicateRoutes', () => {
|
|||
it('works', () => {
|
||||
expect(() => {
|
||||
handleDuplicateRoutes(routes, 'throw');
|
||||
}).toThrowErrorMatchingSnapshot();
|
||||
}).toThrowErrorMatchingInlineSnapshot(`
|
||||
"Duplicate routes found!
|
||||
- Attempting to create page at /search, but a page already exists at this route.
|
||||
- Attempting to create page at /sameDoc, but a page already exists at this route.
|
||||
- Attempting to create page at /, but a page already exists at this route.
|
||||
- Attempting to create page at /, but a page already exists at this route.
|
||||
This could lead to non-deterministic routing behavior."
|
||||
`);
|
||||
const consoleMock = jest.spyOn(console, 'log').mockImplementation(() => {});
|
||||
handleDuplicateRoutes(routes, 'ignore');
|
||||
expect(consoleMock).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadRoutes', () => {
|
||||
it('loads nested route config', () => {
|
||||
const nestedRouteConfig: RouteConfig = {
|
||||
component: '@theme/DocRoot',
|
||||
path: '/docs:route',
|
||||
modules: {
|
||||
docsMetadata: 'docs-b5f.json',
|
||||
},
|
||||
routes: [
|
||||
{
|
||||
path: '/docs/hello',
|
||||
component: '@theme/DocItem',
|
||||
exact: true,
|
||||
modules: {
|
||||
content: 'docs/hello.md',
|
||||
metadata: 'docs-hello-da2.json',
|
||||
},
|
||||
context: {
|
||||
plugin: 'pluginRouteContextModule-100.json',
|
||||
},
|
||||
sidebar: 'main',
|
||||
},
|
||||
{
|
||||
path: 'docs/foo/baz',
|
||||
component: '@theme/DocItem',
|
||||
modules: {
|
||||
content: 'docs/foo/baz.md',
|
||||
metadata: 'docs-foo-baz-dd9.json',
|
||||
},
|
||||
context: {
|
||||
plugin: 'pluginRouteContextModule-100.json',
|
||||
},
|
||||
sidebar: 'secondary',
|
||||
'key:a': 'containing colon',
|
||||
"key'b": 'containing quote',
|
||||
'key"c': 'containing double quote',
|
||||
'key,d': 'containing comma',
|
||||
字段: 'containing unicode',
|
||||
},
|
||||
],
|
||||
};
|
||||
expect(loadRoutes([nestedRouteConfig], '/', 'ignore')).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('loads flat route config', () => {
|
||||
const flatRouteConfig: RouteConfig = {
|
||||
path: '/blog',
|
||||
component: '@theme/BlogListPage',
|
||||
exact: true,
|
||||
modules: {
|
||||
items: [
|
||||
{
|
||||
content: {
|
||||
__import: true,
|
||||
path: 'blog/2018-12-14-Happy-First-Birthday-Slash.md',
|
||||
query: {
|
||||
truncated: true,
|
||||
},
|
||||
},
|
||||
metadata: 'blog-2018-12-14-happy-first-birthday-slash-d2c.json',
|
||||
},
|
||||
{
|
||||
content: 'blog/2018-12-14-Happy-First-Birthday-Slash.md',
|
||||
},
|
||||
{
|
||||
content: {
|
||||
__import: true,
|
||||
path: 'blog/2018-12-14-Happy-First-Birthday-Slash.md',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
expect(loadRoutes([flatRouteConfig], '/', 'ignore')).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('rejects invalid route config', () => {
|
||||
const routeConfigWithoutPath = {
|
||||
component: 'hello/world.js',
|
||||
} as RouteConfig;
|
||||
|
||||
expect(() => loadRoutes([routeConfigWithoutPath], '/', 'ignore'))
|
||||
.toThrowErrorMatchingInlineSnapshot(`
|
||||
"Invalid route config: path must be a string and component is required.
|
||||
{"component":"hello/world.js"}"
|
||||
`);
|
||||
|
||||
const routeConfigWithoutComponent = {
|
||||
path: '/hello/world',
|
||||
} as RouteConfig;
|
||||
|
||||
expect(() => loadRoutes([routeConfigWithoutComponent], '/', 'ignore'))
|
||||
.toThrowErrorMatchingInlineSnapshot(`
|
||||
"Invalid route config: path must be a string and component is required.
|
||||
{"path":"/hello/world"}"
|
||||
`);
|
||||
});
|
||||
|
||||
it('loads route config with empty (but valid) path string', () => {
|
||||
const routeConfig = {
|
||||
path: '',
|
||||
component: 'hello/world.js',
|
||||
} as RouteConfig;
|
||||
|
||||
expect(loadRoutes([routeConfig], '/', 'ignore')).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -13,15 +13,15 @@ import type {DeepPartial} from 'utility-types';

 describe('load', () => {
   it('loads props for site with custom i18n path', async () => {
-    const props = await loadSetup('custom-i18n-site');
-    expect(props).toMatchSnapshot();
-    const props2 = await loadSetup('custom-i18n-site', {locale: 'zh-Hans'});
-    expect(props2).toEqual(
+    const site = await loadSetup('custom-i18n-site');
+    expect(site.props).toMatchSnapshot();
+    const site2 = await loadSetup('custom-i18n-site', {locale: 'zh-Hans'});
+    expect(site2.props).toEqual(
       mergeWithCustomize<DeepPartial<Props>>({
         customizeArray(a, b, key) {
           return ['routesPaths', 'plugins'].includes(key) ? b : undefined;
         },
-      })(props, {
+      })(site.props, {
         baseUrl: '/zh-Hans/',
         i18n: {
           currentLocale: 'zh-Hans',
@@ -38,7 +38,7 @@ describe('load', () => {
       siteConfig: {
         baseUrl: '/zh-Hans/',
       },
-      plugins: props2.plugins,
+      plugins: site2.props.plugins,
     }),
   );
 });
@@ -6,14 +6,14 @@
  */

 import path from 'path';
-import {load, type LoadContextOptions} from '../index';
-import type {Props} from '@docusaurus/types';
+import {loadSite, type LoadContextParams} from '../site';
+import type {Site} from '@docusaurus/types';

 // Helper methods to setup dummy/fake projects.
 export async function loadSetup(
   name: string,
-  options?: Partial<LoadContextOptions>,
-): Promise<Props> {
+  options?: Partial<LoadContextParams>,
+): Promise<Site> {
   const fixtures = path.join(__dirname, '__fixtures__');
-  return load({siteDir: path.join(fixtures, name), ...options});
+  return loadSite({siteDir: path.join(fixtures, name), ...options});
 }
@@ -1,33 +0,0 @@
-/**
- * Copyright (c) Facebook, Inc. and its affiliates.
- *
- * This source code is licensed under the MIT license found in the
- * LICENSE file in the root directory of this source tree.
- */
-
-import {getAllFinalRoutes} from '../utils';
-import type {RouteConfig} from '@docusaurus/types';
-
-describe('getAllFinalRoutes', () => {
-  it('gets final routes correctly', () => {
-    const routes: RouteConfig[] = [
-      {
-        path: '/docs',
-        component: '',
-        routes: [
-          {path: '/docs/someDoc', component: ''},
-          {path: '/docs/someOtherDoc', component: ''},
-        ],
-      },
-      {
-        path: '/community',
-        component: '',
-      },
-    ];
-    expect(getAllFinalRoutes(routes)).toEqual([
-      routes[0]!.routes![0],
-      routes[0]!.routes![1],
-      routes[1],
-    ]);
-  });
-});
@@ -15,7 +15,7 @@ import {
   serializeURLPath,
   type URLPath,
 } from '@docusaurus/utils';
-import {getAllFinalRoutes} from './utils';
+import {getAllFinalRoutes} from './routes';
 import type {RouteConfig, ReportingSeverity} from '@docusaurus/types';

 function matchRoutes(routeConfig: RouteConfig[], pathname: string) {
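(For context on the relocated helper: getAllFinalRoutes flattens a nested route tree down to its leaf, "final" routes, which is what the test moved in this PR asserts. Below is a minimal sketch of equivalent behavior, assumed rather than copied from the actual implementation.)

import type {RouteConfig} from '@docusaurus/types';

// A route that has subroutes is not final; recurse until leaf routes are reached.
function getAllFinalRoutesSketch(routeConfig: RouteConfig[]): RouteConfig[] {
  return routeConfig.flatMap((route) =>
    route.routes ? getAllFinalRoutesSketch(route.routes) : [route],
  );
}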
@@ -1,14 +1,5 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP

-exports[`handleDuplicateRoutes works 1`] = `
-"Duplicate routes found!
-- Attempting to create page at /search, but a page already exists at this route.
-- Attempting to create page at /sameDoc, but a page already exists at this route.
-- Attempting to create page at /, but a page already exists at this route.
-- Attempting to create page at /, but a page already exists at this route.
-This could lead to non-deterministic routing behavior."
-`;
-
 exports[`loadRoutes loads flat route config 1`] = `
 {
   "registry": {
@@ -49,10 +40,6 @@ export default [
   },
 ];
 ",
-  "routesPaths": [
-    "/404.html",
-    "/blog",
-  ],
 }
 `;
@@ -122,11 +109,6 @@ export default [
   },
 ];
 ",
-  "routesPaths": [
-    "/404.html",
-    "/docs/hello",
-    "docs/foo/baz",
-  ],
 }
 `;
@@ -154,9 +136,5 @@ export default [
   },
 ];
 ",
-  "routesPaths": [
-    "/404.html",
-    "",
-  ],
 }
 `;
@ -0,0 +1,205 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import {generateRoutesCode, genChunkName} from '../codegenRoutes';
|
||||
import type {RouteConfig} from '@docusaurus/types';
|
||||
|
||||
describe('genChunkName', () => {
|
||||
it('works', () => {
|
||||
const firstAssert: {[key: string]: string} = {
|
||||
'/docs/adding-blog': 'docs-adding-blog-062',
|
||||
'/docs/versioning': 'docs-versioning-8a8',
|
||||
'/': 'index',
|
||||
'/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus':
|
||||
'blog-2018-04-30-how-i-converted-profilo-to-docusaurus-4f2',
|
||||
'/youtube': 'youtube-429',
|
||||
'/users/en/': 'users-en-f7a',
|
||||
'/blog': 'blog-c06',
|
||||
};
|
||||
Object.keys(firstAssert).forEach((str) => {
|
||||
expect(genChunkName(str)).toBe(firstAssert[str]);
|
||||
});
|
||||
});
|
||||
|
||||
it("doesn't allow different chunk name for same path", () => {
|
||||
expect(genChunkName('path/is/similar', 'oldPrefix')).toEqual(
|
||||
genChunkName('path/is/similar', 'newPrefix'),
|
||||
);
|
||||
});
|
||||
|
||||
it('emits different chunk names for different paths even with same preferred name', () => {
|
||||
const secondAssert: {[key: string]: string} = {
|
||||
'/blog/1': 'blog-85-f-089',
|
||||
'/blog/2': 'blog-353-489',
|
||||
};
|
||||
Object.keys(secondAssert).forEach((str) => {
|
||||
expect(genChunkName(str, undefined, 'blog')).toBe(secondAssert[str]);
|
||||
});
|
||||
});
|
||||
|
||||
it('only generates short unique IDs', () => {
|
||||
const thirdAssert: {[key: string]: string} = {
|
||||
a: '0cc175b9',
|
||||
b: '92eb5ffe',
|
||||
c: '4a8a08f0',
|
||||
d: '8277e091',
|
||||
};
|
||||
Object.keys(thirdAssert).forEach((str) => {
|
||||
expect(genChunkName(str, undefined, undefined, true)).toBe(
|
||||
thirdAssert[str],
|
||||
);
|
||||
});
|
||||
expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091');
|
||||
});
|
||||
|
||||
// https://github.com/facebook/docusaurus/issues/8536
|
||||
it('avoids hash collisions', () => {
|
||||
expect(
|
||||
genChunkName(
|
||||
'@site/blog/2022-11-18-bye-medium/index.mdx?truncated=true',
|
||||
'content',
|
||||
'blog',
|
||||
false,
|
||||
),
|
||||
).not.toBe(
|
||||
genChunkName(
|
||||
'@site/blog/2019-10-05-react-nfc/index.mdx?truncated=true',
|
||||
'content',
|
||||
'blog',
|
||||
false,
|
||||
),
|
||||
);
|
||||
expect(
|
||||
genChunkName(
|
||||
'@site/blog/2022-11-18-bye-medium/index.mdx?truncated=true',
|
||||
'content',
|
||||
'blog',
|
||||
true,
|
||||
),
|
||||
).not.toBe(
|
||||
genChunkName(
|
||||
'@site/blog/2019-10-05-react-nfc/index.mdx?truncated=true',
|
||||
'content',
|
||||
'blog',
|
||||
true,
|
||||
),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadRoutes', () => {
|
||||
it('loads nested route config', () => {
|
||||
const nestedRouteConfig: RouteConfig = {
|
||||
component: '@theme/DocRoot',
|
||||
path: '/docs:route',
|
||||
modules: {
|
||||
docsMetadata: 'docs-b5f.json',
|
||||
},
|
||||
routes: [
|
||||
{
|
||||
path: '/docs/hello',
|
||||
component: '@theme/DocItem',
|
||||
exact: true,
|
||||
modules: {
|
||||
content: 'docs/hello.md',
|
||||
metadata: 'docs-hello-da2.json',
|
||||
},
|
||||
context: {
|
||||
plugin: 'pluginRouteContextModule-100.json',
|
||||
},
|
||||
sidebar: 'main',
|
||||
},
|
||||
{
|
||||
path: 'docs/foo/baz',
|
||||
component: '@theme/DocItem',
|
||||
modules: {
|
||||
content: 'docs/foo/baz.md',
|
||||
metadata: 'docs-foo-baz-dd9.json',
|
||||
},
|
||||
context: {
|
||||
plugin: 'pluginRouteContextModule-100.json',
|
||||
},
|
||||
sidebar: 'secondary',
|
||||
'key:a': 'containing colon',
|
||||
"key'b": 'containing quote',
|
||||
'key"c': 'containing double quote',
|
||||
'key,d': 'containing comma',
|
||||
字段: 'containing unicode',
|
||||
},
|
||||
],
|
||||
};
|
||||
expect(
|
||||
generateRoutesCode([nestedRouteConfig], '/', 'ignore'),
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('loads flat route config', () => {
|
||||
const flatRouteConfig: RouteConfig = {
|
||||
path: '/blog',
|
||||
component: '@theme/BlogListPage',
|
||||
exact: true,
|
||||
modules: {
|
||||
items: [
|
||||
{
|
||||
content: {
|
||||
__import: true,
|
||||
path: 'blog/2018-12-14-Happy-First-Birthday-Slash.md',
|
||||
query: {
|
||||
truncated: true,
|
||||
},
|
||||
},
|
||||
metadata: 'blog-2018-12-14-happy-first-birthday-slash-d2c.json',
|
||||
},
|
||||
{
|
||||
content: 'blog/2018-12-14-Happy-First-Birthday-Slash.md',
|
||||
},
|
||||
{
|
||||
content: {
|
||||
__import: true,
|
||||
path: 'blog/2018-12-14-Happy-First-Birthday-Slash.md',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
expect(
|
||||
generateRoutesCode([flatRouteConfig], '/', 'ignore'),
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('rejects invalid route config', () => {
|
||||
const routeConfigWithoutPath = {
|
||||
component: 'hello/world.js',
|
||||
} as RouteConfig;
|
||||
|
||||
expect(() => generateRoutesCode([routeConfigWithoutPath], '/', 'ignore'))
|
||||
.toThrowErrorMatchingInlineSnapshot(`
|
||||
"Invalid route config: path must be a string and component is required.
|
||||
{"component":"hello/world.js"}"
|
||||
`);
|
||||
|
||||
const routeConfigWithoutComponent = {
|
||||
path: '/hello/world',
|
||||
} as RouteConfig;
|
||||
|
||||
expect(() =>
|
||||
generateRoutesCode([routeConfigWithoutComponent], '/', 'ignore'),
|
||||
).toThrowErrorMatchingInlineSnapshot(`
|
||||
"Invalid route config: path must be a string and component is required.
|
||||
{"path":"/hello/world"}"
|
||||
`);
|
||||
});
|
||||
|
||||
it('loads route config with empty (but valid) path string', () => {
|
||||
const routeConfig = {
|
||||
path: '',
|
||||
component: 'hello/world.js',
|
||||
} as RouteConfig;
|
||||
|
||||
expect(generateRoutesCode([routeConfig], '/', 'ignore')).toMatchSnapshot();
|
||||
});
|
||||
});
|
packages/docusaurus/src/server/codegen/codegen.ts (new file, 157 lines)
@@ -0,0 +1,157 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import {
|
||||
generate,
|
||||
escapePath,
|
||||
DEFAULT_CONFIG_FILE_NAME,
|
||||
} from '@docusaurus/utils';
|
||||
import {generateRouteFiles} from './codegenRoutes';
|
||||
import type {
|
||||
CodeTranslations,
|
||||
DocusaurusConfig,
|
||||
GlobalData,
|
||||
I18n,
|
||||
RouteConfig,
|
||||
SiteMetadata,
|
||||
} from '@docusaurus/types';
|
||||
|
||||
function genWarning({generatedFilesDir}: {generatedFilesDir: string}) {
|
||||
return generate(
|
||||
generatedFilesDir,
|
||||
// cSpell:ignore DONT
|
||||
'DONT-EDIT-THIS-FOLDER',
|
||||
`This folder stores temp files that Docusaurus' client bundler accesses.
|
||||
|
||||
DO NOT hand-modify files in this folder because they will be overwritten in the
|
||||
next build. You can clear all build artifacts (including this folder) with the
|
||||
\`docusaurus clear\` command.
|
||||
`,
|
||||
);
|
||||
}
|
||||
|
||||
function genSiteConfig({
|
||||
generatedFilesDir,
|
||||
siteConfig,
|
||||
}: {
|
||||
generatedFilesDir: string;
|
||||
siteConfig: DocusaurusConfig;
|
||||
}) {
|
||||
return generate(
|
||||
generatedFilesDir,
|
||||
`${DEFAULT_CONFIG_FILE_NAME}.mjs`,
|
||||
`/*
|
||||
* AUTOGENERATED - DON'T EDIT
|
||||
* Your edits in this file will be overwritten in the next build!
|
||||
* Modify the docusaurus.config.js file at your site's root instead.
|
||||
*/
|
||||
export default ${JSON.stringify(siteConfig, null, 2)};
|
||||
`,
|
||||
);
|
||||
}
|
||||
|
||||
function genClientModules({
|
||||
generatedFilesDir,
|
||||
clientModules,
|
||||
}: {
|
||||
generatedFilesDir: string;
|
||||
clientModules: string[];
|
||||
}) {
|
||||
return generate(
|
||||
generatedFilesDir,
|
||||
'client-modules.js',
|
||||
`export default [
|
||||
${clientModules
|
||||
// Use `require()` because `import()` is async but client modules can have CSS
|
||||
// and the order matters for loading CSS.
|
||||
.map((clientModule) => ` require("${escapePath(clientModule)}"),`)
|
||||
.join('\n')}
|
||||
];
|
||||
`,
|
||||
);
|
||||
}
|
||||
|
||||
function genGlobalData({
|
||||
generatedFilesDir,
|
||||
globalData,
|
||||
}: {
|
||||
generatedFilesDir: string;
|
||||
globalData: GlobalData;
|
||||
}) {
|
||||
return generate(
|
||||
generatedFilesDir,
|
||||
'globalData.json',
|
||||
JSON.stringify(globalData, null, 2),
|
||||
);
|
||||
}
|
||||
|
||||
function genI18n({
|
||||
generatedFilesDir,
|
||||
i18n,
|
||||
}: {
|
||||
generatedFilesDir: string;
|
||||
i18n: I18n;
|
||||
}) {
|
||||
return generate(
|
||||
generatedFilesDir,
|
||||
'i18n.json',
|
||||
JSON.stringify(i18n, null, 2),
|
||||
);
|
||||
}
|
||||
|
||||
function genCodeTranslations({
|
||||
generatedFilesDir,
|
||||
codeTranslations,
|
||||
}: {
|
||||
generatedFilesDir: string;
|
||||
codeTranslations: CodeTranslations;
|
||||
}) {
|
||||
return generate(
|
||||
generatedFilesDir,
|
||||
'codeTranslations.json',
|
||||
JSON.stringify(codeTranslations, null, 2),
|
||||
);
|
||||
}
|
||||
|
||||
function genSiteMetadata({
|
||||
generatedFilesDir,
|
||||
siteMetadata,
|
||||
}: {
|
||||
generatedFilesDir: string;
|
||||
siteMetadata: SiteMetadata;
|
||||
}) {
|
||||
return generate(
|
||||
generatedFilesDir,
|
||||
'site-metadata.json',
|
||||
JSON.stringify(siteMetadata, null, 2),
|
||||
);
|
||||
}
|
||||
|
||||
type CodegenParams = {
|
||||
generatedFilesDir: string;
|
||||
siteConfig: DocusaurusConfig;
|
||||
baseUrl: string;
|
||||
clientModules: string[];
|
||||
globalData: GlobalData;
|
||||
i18n: I18n;
|
||||
codeTranslations: CodeTranslations;
|
||||
siteMetadata: SiteMetadata;
|
||||
routes: RouteConfig[];
|
||||
};
|
||||
|
||||
export async function generateSiteFiles(params: CodegenParams): Promise<void> {
|
||||
await Promise.all([
|
||||
genWarning(params),
|
||||
genClientModules(params),
|
||||
genSiteConfig(params),
|
||||
generateRouteFiles(params),
|
||||
genGlobalData(params),
|
||||
genSiteMetadata(params),
|
||||
genI18n(params),
|
||||
genCodeTranslations(params),
|
||||
]);
|
||||
}
|
327
packages/docusaurus/src/server/codegen/codegenRoutes.ts
Normal file
327
packages/docusaurus/src/server/codegen/codegenRoutes.ts
Normal file
|
@ -0,0 +1,327 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import query from 'querystring';
|
||||
import _ from 'lodash';
|
||||
import {docuHash, simpleHash, escapePath, generate} from '@docusaurus/utils';
|
||||
import type {
|
||||
Module,
|
||||
RouteConfig,
|
||||
RouteModules,
|
||||
ChunkNames,
|
||||
RouteChunkNames,
|
||||
} from '@docusaurus/types';
|
||||
|
||||
type RoutesCode = {
|
||||
/** Serialized routes config that can be directly emitted into temp file. */
|
||||
routesConfig: string;
|
||||
/** @see {ChunkNames} */
|
||||
routesChunkNames: RouteChunkNames;
|
||||
/**
|
||||
* A map from chunk name to module paths. Module paths would have backslash
|
||||
* escaped already, so they can be directly printed.
|
||||
*/
|
||||
registry: {
|
||||
[chunkName: string]: string;
|
||||
};
|
||||
};
|
||||
|
||||
/** Indents every line of `str` by one level. */
|
||||
function indent(str: string) {
|
||||
return ` ${str.replace(/\n/g, `\n `)}`;
|
||||
}
|
||||
|
||||
const chunkNameCache = new Map<string, string>();
|
||||
const chunkNameCount = new Map<string, number>();
|
||||
|
||||
/**
|
||||
* Generates a unique chunk name that can be used in the chunk registry.
|
||||
*
|
||||
* @param modulePath A path to generate chunk name from. The actual value has no
|
||||
* semantic significance.
|
||||
* @param prefix A prefix to prepend to the chunk name, to avoid name clashes.
|
||||
* @param preferredName Chunk names default to `modulePath`, and this can supply
|
||||
* a more human-readable name.
|
||||
* @param shortId When `true`, the chunk name would only be a hash without any
|
||||
* other characters. Useful for bundle size. Defaults to `true` in production.
|
||||
*/
|
||||
export function genChunkName(
|
||||
modulePath: string,
|
||||
prefix?: string,
|
||||
preferredName?: string,
|
||||
shortId: boolean = process.env.NODE_ENV === 'production',
|
||||
): string {
|
||||
let chunkName = chunkNameCache.get(modulePath);
|
||||
if (!chunkName) {
|
||||
if (shortId) {
|
||||
chunkName = simpleHash(modulePath, 8);
|
||||
} else {
|
||||
let str = modulePath;
|
||||
if (preferredName) {
|
||||
const shortHash = simpleHash(modulePath, 3);
|
||||
str = `${preferredName}${shortHash}`;
|
||||
}
|
||||
const name = docuHash(str);
|
||||
chunkName = prefix ? `${prefix}---${name}` : name;
|
||||
}
|
||||
const seenCount = (chunkNameCount.get(chunkName) ?? 0) + 1;
|
||||
if (seenCount > 1) {
|
||||
chunkName += seenCount.toString(36);
|
||||
}
|
||||
chunkNameCache.set(modulePath, chunkName);
|
||||
chunkNameCount.set(chunkName, seenCount);
|
||||
}
|
||||
return chunkName;
|
||||
}
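A small usage sketch of genChunkName. The module paths are made up, and the concrete hash values depend on simpleHash/docuHash, so they are not spelled out here.

import {genChunkName} from './codegenRoutes';

// Development-style name (shortId=false): derived from the preferred name plus
// a short hash of the module path, prefixed with "content---".
const docsChunk = genChunkName('@site/docs/intro.md', 'content', 'intro', false);

// Production-style name (shortId=true): an 8-character hash only, to keep the
// generated registry small.
const pageChunk = genChunkName('@site/src/pages/index.js', 'component', 'index', true);

// Note: results are cached per module path, so calling genChunkName again with
// '@site/docs/intro.md' returns the same chunk name.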
|
||||
|
||||
/**
|
||||
* Takes a piece of route config, and serializes it into raw JS code. The shape
|
||||
* is the same as react-router's `RouteConfig`. Formatting is similar to
|
||||
* `JSON.stringify` but without all the quotes.
|
||||
*/
|
||||
function serializeRouteConfig({
|
||||
routePath,
|
||||
routeHash,
|
||||
exact,
|
||||
subroutesCodeStrings,
|
||||
props,
|
||||
}: {
|
||||
routePath: string;
|
||||
routeHash: string;
|
||||
exact?: boolean;
|
||||
subroutesCodeStrings?: string[];
|
||||
props: {[propName: string]: unknown};
|
||||
}) {
|
||||
const parts = [
|
||||
`path: '${routePath}'`,
|
||||
`component: ComponentCreator('${routePath}', '${routeHash}')`,
|
||||
];
|
||||
|
||||
if (exact) {
|
||||
parts.push(`exact: true`);
|
||||
}
|
||||
|
||||
if (subroutesCodeStrings) {
|
||||
parts.push(
|
||||
`routes: [
|
||||
${indent(subroutesCodeStrings.join(',\n'))}
|
||||
]`,
|
||||
);
|
||||
}
|
||||
|
||||
Object.entries(props).forEach(([propName, propValue]) => {
|
||||
const isIdentifier =
|
||||
/^[$_\p{ID_Start}][$\u200c\u200d\p{ID_Continue}]*$/u.test(propName);
|
||||
const key = isIdentifier ? propName : JSON.stringify(propName);
|
||||
parts.push(`${key}: ${JSON.stringify(propValue)}`);
|
||||
});
|
||||
|
||||
return `{
|
||||
${indent(parts.join(',\n'))}
|
||||
}`;
|
||||
}
|
||||
|
||||
const isModule = (value: unknown): value is Module =>
|
||||
typeof value === 'string' ||
|
||||
(typeof value === 'object' &&
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
!!(value as {[key: string]: unknown} | null)?.__import);
|
||||
|
||||
/**
|
||||
* Takes a {@link Module} (which is nothing more than a path plus some metadata
|
||||
* like query) and returns the string path it represents.
|
||||
*/
|
||||
function getModulePath(target: Module): string {
|
||||
if (typeof target === 'string') {
|
||||
return target;
|
||||
}
|
||||
const queryStr = target.query ? `?${query.stringify(target.query)}` : '';
|
||||
return `${target.path}${queryStr}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes a route module (which is a tree of modules), and transforms each module
|
||||
* into a chunk name. It also mutates `res.registry` and registers the loaders
|
||||
* for each chunk.
|
||||
*
|
||||
* @param routeModule One route module to be transformed.
|
||||
* @param prefix Prefix passed to {@link genChunkName}.
|
||||
* @param name Preferred name passed to {@link genChunkName}.
|
||||
* @param res The route structures being loaded.
|
||||
*/
|
||||
function genChunkNames(
|
||||
routeModule: RouteModules,
|
||||
prefix: string,
|
||||
name: string,
|
||||
res: RoutesCode,
|
||||
): ChunkNames;
|
||||
function genChunkNames(
|
||||
routeModule: RouteModules | RouteModules[] | Module,
|
||||
prefix: string,
|
||||
name: string,
|
||||
res: RoutesCode,
|
||||
): ChunkNames | ChunkNames[] | string;
|
||||
function genChunkNames(
|
||||
routeModule: RouteModules | RouteModules[] | Module,
|
||||
prefix: string,
|
||||
name: string,
|
||||
res: RoutesCode,
|
||||
): string | ChunkNames | ChunkNames[] {
|
||||
if (isModule(routeModule)) {
|
||||
// This is a leaf node, no need to recurse
|
||||
const modulePath = getModulePath(routeModule);
|
||||
const chunkName = genChunkName(modulePath, prefix, name);
|
||||
res.registry[chunkName] = escapePath(modulePath);
|
||||
return chunkName;
|
||||
}
|
||||
if (Array.isArray(routeModule)) {
|
||||
return routeModule.map((val, index) =>
|
||||
genChunkNames(val, `${index}`, name, res),
|
||||
);
|
||||
}
|
||||
return _.mapValues(routeModule, (v, key) => genChunkNames(v, key, name, res));
|
||||
}
|
||||
|
||||
/**
|
||||
* This is the higher level overview of route code generation. For each route
|
||||
* config node, it returns the node's serialized form, and mutates `registry`
* and `routesChunkNames` accordingly.
|
||||
*/
|
||||
function genRouteCode(routeConfig: RouteConfig, res: RoutesCode): string {
|
||||
const {
|
||||
path: routePath,
|
||||
component,
|
||||
modules = {},
|
||||
context,
|
||||
routes: subroutes,
|
||||
priority,
|
||||
exact,
|
||||
...props
|
||||
} = routeConfig;
|
||||
|
||||
if (typeof routePath !== 'string' || !component) {
|
||||
throw new Error(
|
||||
`Invalid route config: path must be a string and component is required.
|
||||
${JSON.stringify(routeConfig)}`,
|
||||
);
|
||||
}
|
||||
|
||||
const routeHash = simpleHash(JSON.stringify(routeConfig), 3);
|
||||
res.routesChunkNames[`${routePath}-${routeHash}`] = {
|
||||
// Avoid clash with a prop called "component"
|
||||
...genChunkNames({__comp: component}, 'component', component, res),
|
||||
...(context &&
|
||||
genChunkNames({__context: context}, 'context', routePath, res)),
|
||||
...genChunkNames(modules, 'module', routePath, res),
|
||||
};
|
||||
|
||||
return serializeRouteConfig({
|
||||
routePath: routePath.replace(/'/g, "\\'"),
|
||||
routeHash,
|
||||
subroutesCodeStrings: subroutes?.map((r) => genRouteCode(r, res)),
|
||||
exact,
|
||||
props,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Routes are prepared into three temp files:
|
||||
*
|
||||
* - `routesConfig`, the route config passed to react-router. This file is kept
|
||||
* minimal, because it can't be code-splitted.
|
||||
* - `routesChunkNames`, a mapping from route paths (hashed) to code-splitted
|
||||
* chunk names.
|
||||
* - `registry`, a mapping from chunk names to options for react-loadable.
|
||||
*/
|
||||
export function generateRoutesCode(routeConfigs: RouteConfig[]): RoutesCode {
|
||||
const res: RoutesCode = {
|
||||
// To be written by `genRouteCode`
|
||||
routesConfig: '',
|
||||
routesChunkNames: {},
|
||||
registry: {},
|
||||
};
|
||||
|
||||
// `genRouteCode` would mutate `res`
|
||||
const routeConfigSerialized = routeConfigs
|
||||
.map((r) => genRouteCode(r, res))
|
||||
.join(',\n');
|
||||
|
||||
res.routesConfig = `import React from 'react';
|
||||
import ComponentCreator from '@docusaurus/ComponentCreator';
|
||||
|
||||
export default [
|
||||
${indent(routeConfigSerialized)},
|
||||
{
|
||||
path: '*',
|
||||
component: ComponentCreator('*'),
|
||||
},
|
||||
];
|
||||
`;
|
||||
|
||||
return res;
|
||||
}
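A sketch of what generateRoutesCode returns for a single route. The route below is illustrative, and the chunk names and hashes in the comments are placeholders, not real values.

import {generateRoutesCode} from './codegenRoutes';
import type {RouteConfig} from '@docusaurus/types';

const exampleRoutes: RouteConfig[] = [
  {
    path: '/docs/intro',
    component: '@theme/DocItemPage', // assumed theme component name
    exact: true,
    modules: {content: '@site/docs/intro.md'},
  },
];

const {routesConfig, routesChunkNames, registry} = generateRoutesCode(exampleRoutes);
// routesConfig: JS source for routes.js, ending with the catch-all
//   {path: '*', component: ComponentCreator('*')} route.
// routesChunkNames: {'/docs/intro-<hash>': {__comp: '<chunkName>', content: '<chunkName>'}}
// registry: {'<chunkName>': '<escaped module path>'}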
|
||||
|
||||
const genRegistry = ({
|
||||
generatedFilesDir,
|
||||
registry,
|
||||
}: {
|
||||
generatedFilesDir: string;
|
||||
registry: RoutesCode['registry'];
|
||||
}) =>
|
||||
generate(
|
||||
generatedFilesDir,
|
||||
'registry.js',
|
||||
`export default {
|
||||
${Object.entries(registry)
|
||||
.sort((a, b) => a[0].localeCompare(b[0]))
|
||||
.map(
|
||||
([chunkName, modulePath]) =>
|
||||
// modulePath is already escaped by escapePath
|
||||
` "${chunkName}": [() => import(/* webpackChunkName: "${chunkName}" */ "${modulePath}"), "${modulePath}", require.resolveWeak("${modulePath}")],`,
|
||||
)
|
||||
.join('\n')}};
|
||||
`,
|
||||
);
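To make the template above easier to read, this is roughly what one emitted registry.js entry looks like, reformatted across lines; the chunk name and module path are placeholders, and require.resolveWeak is a webpack-only helper consumed by react-loadable.

export default {
  "component---site-src-pages-index-js-abc": [
    () => import(/* webpackChunkName: "component---site-src-pages-index-js-abc" */ "@site/src/pages/index.js"),
    "@site/src/pages/index.js",
    require.resolveWeak("@site/src/pages/index.js"),
  ],
};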
|
||||
|
||||
const genRoutesChunkNames = ({
|
||||
generatedFilesDir,
|
||||
routesChunkNames,
|
||||
}: {
|
||||
generatedFilesDir: string;
|
||||
routesChunkNames: RoutesCode['routesChunkNames'];
|
||||
}) =>
|
||||
generate(
|
||||
generatedFilesDir,
|
||||
'routesChunkNames.json',
|
||||
JSON.stringify(routesChunkNames, null, 2),
|
||||
);
|
||||
|
||||
const genRoutes = ({
|
||||
generatedFilesDir,
|
||||
routesConfig,
|
||||
}: {
|
||||
generatedFilesDir: string;
|
||||
routesConfig: RoutesCode['routesConfig'];
|
||||
}) => generate(generatedFilesDir, 'routes.js', routesConfig);
|
||||
|
||||
type GenerateRouteFilesParams = {
|
||||
generatedFilesDir: string;
|
||||
routes: RouteConfig[];
|
||||
baseUrl: string;
|
||||
};
|
||||
|
||||
export async function generateRouteFiles({
|
||||
generatedFilesDir,
|
||||
routes,
|
||||
}: GenerateRouteFilesParams): Promise<void> {
|
||||
const {registry, routesChunkNames, routesConfig} = generateRoutesCode(routes);
|
||||
await Promise.all([
|
||||
genRegistry({generatedFilesDir, registry}),
|
||||
genRoutesChunkNames({generatedFilesDir, routesChunkNames}),
|
||||
genRoutes({generatedFilesDir, routesConfig}),
|
||||
]);
|
||||
}
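Hypothetical call site for the function above; the .docusaurus path is an assumption.

import type {RouteConfig} from '@docusaurus/types';
import {generateRouteFiles} from './codegenRoutes';

async function writeRouteFiles(routes: RouteConfig[]) {
  await generateRouteFiles({
    generatedFilesDir: '/my-site/.docusaurus',
    routes,
    baseUrl: '/',
  });
  // Emits routes.js, routesChunkNames.json and registry.js into generatedFilesDir.
}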
|
|
@ -8,7 +8,7 @@
|
|||
import logger from '@docusaurus/logger';
|
||||
import {getLangDir} from 'rtl-detect';
|
||||
import type {I18n, DocusaurusConfig, I18nLocaleConfig} from '@docusaurus/types';
|
||||
import type {LoadContextOptions} from './index';
|
||||
import type {LoadContextParams} from './site';
|
||||
|
||||
function getDefaultLocaleLabel(locale: string) {
|
||||
const languageName = new Intl.DisplayNames(locale, {type: 'language'}).of(
|
||||
|
@ -55,7 +55,7 @@ export function getDefaultLocaleConfig(locale: string): I18nLocaleConfig {
|
|||
|
||||
export async function loadI18n(
|
||||
config: DocusaurusConfig,
|
||||
options: Pick<LoadContextOptions, 'locale'>,
|
||||
options: Pick<LoadContextParams, 'locale'>,
|
||||
): Promise<I18n> {
|
||||
const {i18n: i18nConfig} = config;
|
||||
|
||||
|
|
|
@ -1,266 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import path from 'path';
|
||||
import _ from 'lodash';
|
||||
import {
|
||||
generate,
|
||||
escapePath,
|
||||
localizePath,
|
||||
DEFAULT_BUILD_DIR_NAME,
|
||||
DEFAULT_CONFIG_FILE_NAME,
|
||||
GENERATED_FILES_DIR_NAME,
|
||||
} from '@docusaurus/utils';
|
||||
import {loadSiteConfig} from './config';
|
||||
import {loadClientModules} from './clientModules';
|
||||
import {loadPlugins} from './plugins';
|
||||
import {loadRoutes} from './routes';
|
||||
import {loadHtmlTags} from './htmlTags';
|
||||
import {loadSiteMetadata} from './siteMetadata';
|
||||
import {loadI18n} from './i18n';
|
||||
import {
|
||||
readCodeTranslationFileContent,
|
||||
getPluginsDefaultCodeTranslationMessages,
|
||||
} from './translations/translations';
|
||||
import type {DocusaurusConfig, LoadContext, Props} from '@docusaurus/types';
|
||||
|
||||
export type LoadContextOptions = {
|
||||
/** Usually the CWD; can be overridden with command argument. */
|
||||
siteDir: string;
|
||||
/** Custom output directory. Can be customized with `--out-dir` option */
|
||||
outDir?: string;
|
||||
/** Custom config path. Can be customized with `--config` option */
|
||||
config?: string;
|
||||
/** Default is `i18n.defaultLocale` */
|
||||
locale?: string;
|
||||
/**
|
||||
* `true` means the paths will have the locale prepended; `false` means they
|
||||
* won't (useful for `yarn build -l zh-Hans` where the output should be
|
||||
* emitted into `build/` instead of `build/zh-Hans/`); `undefined` is like the
|
||||
* "smart" option where only non-default locale paths are localized
|
||||
*/
|
||||
localizePath?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* Loading context is the very first step in site building. Its options are
|
||||
* directly acquired from CLI options. It mainly loads `siteConfig` and the i18n
|
||||
* context (which includes code translations). The `LoadContext` will be passed
|
||||
* to plugin constructors.
|
||||
*/
|
||||
export async function loadContext(
|
||||
options: LoadContextOptions,
|
||||
): Promise<LoadContext> {
|
||||
const {
|
||||
siteDir,
|
||||
outDir: baseOutDir = DEFAULT_BUILD_DIR_NAME,
|
||||
locale,
|
||||
config: customConfigFilePath,
|
||||
} = options;
|
||||
const generatedFilesDir = path.resolve(siteDir, GENERATED_FILES_DIR_NAME);
|
||||
|
||||
const {siteConfig: initialSiteConfig, siteConfigPath} = await loadSiteConfig({
|
||||
siteDir,
|
||||
customConfigFilePath,
|
||||
});
|
||||
|
||||
const i18n = await loadI18n(initialSiteConfig, {locale});
|
||||
|
||||
const baseUrl = localizePath({
|
||||
path: initialSiteConfig.baseUrl,
|
||||
i18n,
|
||||
options,
|
||||
pathType: 'url',
|
||||
});
|
||||
const outDir = localizePath({
|
||||
path: path.resolve(siteDir, baseOutDir),
|
||||
i18n,
|
||||
options,
|
||||
pathType: 'fs',
|
||||
});
|
||||
|
||||
const siteConfig: DocusaurusConfig = {...initialSiteConfig, baseUrl};
|
||||
|
||||
const localizationDir = path.resolve(
|
||||
siteDir,
|
||||
i18n.path,
|
||||
i18n.localeConfigs[i18n.currentLocale]!.path,
|
||||
);
|
||||
|
||||
const codeTranslationFileContent =
|
||||
(await readCodeTranslationFileContent({localizationDir})) ?? {};
|
||||
|
||||
// We only need key->message for code translations
|
||||
const codeTranslations = _.mapValues(
|
||||
codeTranslationFileContent,
|
||||
(value) => value.message,
|
||||
);
|
||||
|
||||
return {
|
||||
siteDir,
|
||||
generatedFilesDir,
|
||||
localizationDir,
|
||||
siteConfig,
|
||||
siteConfigPath,
|
||||
outDir,
|
||||
baseUrl,
|
||||
i18n,
|
||||
codeTranslations,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* This is the crux of the Docusaurus server-side. It reads everything it needs—
|
||||
* code translations, config file, plugin modules... Plugins then use their
|
||||
* lifecycles to generate content and other data. It is side-effect-ful because
|
||||
* it generates temp files in the `.docusaurus` folder for the bundler.
|
||||
*/
|
||||
export async function load(options: LoadContextOptions): Promise<Props> {
|
||||
const {siteDir} = options;
|
||||
const context = await loadContext(options);
|
||||
const {
|
||||
generatedFilesDir,
|
||||
siteConfig,
|
||||
siteConfigPath,
|
||||
outDir,
|
||||
baseUrl,
|
||||
i18n,
|
||||
localizationDir,
|
||||
codeTranslations: siteCodeTranslations,
|
||||
} = context;
|
||||
const {plugins, pluginsRouteConfigs, globalData} = await loadPlugins(context);
|
||||
const clientModules = loadClientModules(plugins);
|
||||
const {headTags, preBodyTags, postBodyTags} = loadHtmlTags(plugins);
|
||||
const {registry, routesChunkNames, routesConfig, routesPaths} = loadRoutes(
|
||||
pluginsRouteConfigs,
|
||||
baseUrl,
|
||||
siteConfig.onDuplicateRoutes,
|
||||
);
|
||||
const codeTranslations = {
|
||||
...(await getPluginsDefaultCodeTranslationMessages(plugins)),
|
||||
...siteCodeTranslations,
|
||||
};
|
||||
const siteMetadata = await loadSiteMetadata({plugins, siteDir});
|
||||
|
||||
// === Side-effects part ===
|
||||
|
||||
const genWarning = generate(
|
||||
generatedFilesDir,
|
||||
// cSpell:ignore DONT
|
||||
'DONT-EDIT-THIS-FOLDER',
|
||||
`This folder stores temp files that Docusaurus' client bundler accesses.
|
||||
|
||||
DO NOT hand-modify files in this folder because they will be overwritten in the
|
||||
next build. You can clear all build artifacts (including this folder) with the
|
||||
\`docusaurus clear\` command.
|
||||
`,
|
||||
);
|
||||
|
||||
const genSiteConfig = generate(
|
||||
generatedFilesDir,
|
||||
`${DEFAULT_CONFIG_FILE_NAME}.mjs`,
|
||||
`/*
|
||||
* AUTOGENERATED - DON'T EDIT
|
||||
* Your edits in this file will be overwritten in the next build!
|
||||
* Modify the docusaurus.config.js file at your site's root instead.
|
||||
*/
|
||||
export default ${JSON.stringify(siteConfig, null, 2)};
|
||||
`,
|
||||
);
|
||||
|
||||
const genClientModules = generate(
|
||||
generatedFilesDir,
|
||||
'client-modules.js',
|
||||
`export default [
|
||||
${clientModules
|
||||
// Use `require()` because `import()` is async but client modules can have CSS
|
||||
// and the order matters for loading CSS.
|
||||
.map((clientModule) => ` require("${escapePath(clientModule)}"),`)
|
||||
.join('\n')}
|
||||
];
|
||||
`,
|
||||
);
|
||||
|
||||
const genRegistry = generate(
|
||||
generatedFilesDir,
|
||||
'registry.js',
|
||||
`export default {
|
||||
${Object.entries(registry)
|
||||
.sort((a, b) => a[0].localeCompare(b[0]))
|
||||
.map(
|
||||
([chunkName, modulePath]) =>
|
||||
// modulePath is already escaped by escapePath
|
||||
` "${chunkName}": [() => import(/* webpackChunkName: "${chunkName}" */ "${modulePath}"), "${modulePath}", require.resolveWeak("${modulePath}")],`,
|
||||
)
|
||||
.join('\n')}};
|
||||
`,
|
||||
);
|
||||
|
||||
const genRoutesChunkNames = generate(
|
||||
generatedFilesDir,
|
||||
'routesChunkNames.json',
|
||||
JSON.stringify(routesChunkNames, null, 2),
|
||||
);
|
||||
|
||||
const genRoutes = generate(generatedFilesDir, 'routes.js', routesConfig);
|
||||
|
||||
const genGlobalData = generate(
|
||||
generatedFilesDir,
|
||||
'globalData.json',
|
||||
JSON.stringify(globalData, null, 2),
|
||||
);
|
||||
|
||||
const genI18n = generate(
|
||||
generatedFilesDir,
|
||||
'i18n.json',
|
||||
JSON.stringify(i18n, null, 2),
|
||||
);
|
||||
|
||||
const genCodeTranslations = generate(
|
||||
generatedFilesDir,
|
||||
'codeTranslations.json',
|
||||
JSON.stringify(codeTranslations, null, 2),
|
||||
);
|
||||
|
||||
const genSiteMetadata = generate(
|
||||
generatedFilesDir,
|
||||
'site-metadata.json',
|
||||
JSON.stringify(siteMetadata, null, 2),
|
||||
);
|
||||
|
||||
await Promise.all([
|
||||
genWarning,
|
||||
genClientModules,
|
||||
genSiteConfig,
|
||||
genRegistry,
|
||||
genRoutesChunkNames,
|
||||
genRoutes,
|
||||
genGlobalData,
|
||||
genSiteMetadata,
|
||||
genI18n,
|
||||
genCodeTranslations,
|
||||
]);
|
||||
|
||||
return {
|
||||
siteConfig,
|
||||
siteConfigPath,
|
||||
siteMetadata,
|
||||
siteDir,
|
||||
outDir,
|
||||
baseUrl,
|
||||
i18n,
|
||||
localizationDir,
|
||||
generatedFilesDir,
|
||||
routes: pluginsRouteConfigs,
|
||||
routesPaths,
|
||||
plugins,
|
||||
headTags,
|
||||
preBodyTags,
|
||||
postBodyTags,
|
||||
codeTranslations,
|
||||
};
|
||||
}
|
|
@ -63,7 +63,7 @@ exports[`loadPlugins loads plugins 1`] = `
|
|||
},
|
||||
},
|
||||
],
|
||||
"pluginsRouteConfigs": [
|
||||
"routes": [
|
||||
{
|
||||
"component": "Comp",
|
||||
"context": {
|
|
@ -1,6 +1,6 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`sortConfig sorts route config correctly 1`] = `
|
||||
exports[`sortRoutes sorts route config correctly 1`] = `
|
||||
[
|
||||
{
|
||||
"component": "",
|
||||
|
@ -55,7 +55,7 @@ exports[`sortConfig sorts route config correctly 1`] = `
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`sortConfig sorts route config given a baseURL 1`] = `
|
||||
exports[`sortRoutes sorts route config given a baseURL 1`] = `
|
||||
[
|
||||
{
|
||||
"component": "",
|
||||
|
@ -104,7 +104,153 @@ exports[`sortConfig sorts route config given a baseURL 1`] = `
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`sortConfig sorts route config recursively 1`] = `
|
||||
exports[`sortRoutes sorts route config recursively 1`] = `
|
||||
[
|
||||
{
|
||||
"component": "",
|
||||
"exact": true,
|
||||
"path": "/some/page",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/docs",
|
||||
"routes": [
|
||||
{
|
||||
"component": "",
|
||||
"exact": true,
|
||||
"path": "/docs/tags",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"exact": true,
|
||||
"path": "/docs/tags/someTag",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/docs",
|
||||
"routes": [
|
||||
{
|
||||
"component": "",
|
||||
"exact": true,
|
||||
"path": "/docs/doc1",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"exact": true,
|
||||
"path": "/docs/doc2",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
`;
|
||||
|
||||
exports[`sortRoutes sorts route config correctly 1`] = `
|
||||
[
|
||||
{
|
||||
"component": "",
|
||||
"path": "/community",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/some-page",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/docs",
|
||||
"routes": [
|
||||
{
|
||||
"component": "",
|
||||
"path": "/docs/someDoc",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/docs/someOtherDoc",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/",
|
||||
"routes": [
|
||||
{
|
||||
"component": "",
|
||||
"path": "/someDoc",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/someOtherDoc",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/",
|
||||
"routes": [
|
||||
{
|
||||
"component": "",
|
||||
"path": "/subroute",
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
`;
|
||||
|
||||
exports[`sortRoutes sorts route config given a baseURL 1`] = `
|
||||
[
|
||||
{
|
||||
"component": "",
|
||||
"path": "/latest/community",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/latest/example",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/latest/some-page",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/latest/docs",
|
||||
"routes": [
|
||||
{
|
||||
"component": "",
|
||||
"path": "/latest/docs/someDoc",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/latest/docs/someOtherDoc",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/latest/",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/latest/",
|
||||
"routes": [
|
||||
{
|
||||
"component": "",
|
||||
"path": "/latest/someDoc",
|
||||
},
|
||||
{
|
||||
"component": "",
|
||||
"path": "/latest/someOtherDoc",
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
`;
|
||||
|
||||
exports[`sortRoutes sorts route config recursively 1`] = `
|
||||
[
|
||||
{
|
||||
"component": "",
|
||||
|
|
|
@ -7,11 +7,11 @@
|
|||
|
||||
import path from 'path';
|
||||
|
||||
import {loadContext, type LoadContextOptions} from '../../index';
|
||||
import {loadContext, type LoadContextParams} from '../../site';
|
||||
import {initPlugins} from '../init';
|
||||
|
||||
describe('initPlugins', () => {
|
||||
async function loadSite(options: Omit<LoadContextOptions, 'siteDir'> = {}) {
|
||||
async function loadSite(options: Omit<LoadContextParams, 'siteDir'> = {}) {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'site-with-plugin');
|
||||
const context = await loadContext({...options, siteDir});
|
||||
const plugins = await initPlugins(context);
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
*/
|
||||
|
||||
import path from 'path';
|
||||
import {loadPlugins} from '..';
|
||||
import {loadPlugins} from '../plugins';
|
||||
import type {Plugin, Props} from '@docusaurus/types';
|
||||
|
||||
describe('loadPlugins', () => {
|
|
@ -5,7 +5,7 @@
|
|||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import {applyRouteTrailingSlash, sortConfig} from '../routeConfig';
|
||||
import {applyRouteTrailingSlash, sortRoutes} from '../routeConfig';
|
||||
import type {RouteConfig} from '@docusaurus/types';
|
||||
import type {ApplyTrailingSlashParams} from '@docusaurus/utils-common';
|
||||
|
||||
|
@ -164,7 +164,7 @@ describe('applyRouteTrailingSlash', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('sortConfig', () => {
|
||||
describe('sortRoutes', () => {
|
||||
it('sorts route config correctly', () => {
|
||||
const routes: RouteConfig[] = [
|
||||
{
|
||||
|
@ -202,7 +202,7 @@ describe('sortConfig', () => {
|
|||
},
|
||||
];
|
||||
|
||||
sortConfig(routes);
|
||||
sortRoutes(routes);
|
||||
|
||||
expect(routes).toMatchSnapshot();
|
||||
});
|
||||
|
@ -248,7 +248,7 @@ describe('sortConfig', () => {
|
|||
},
|
||||
];
|
||||
|
||||
sortConfig(routes);
|
||||
sortRoutes(routes);
|
||||
|
||||
expect(routes).toMatchSnapshot();
|
||||
});
|
||||
|
@ -290,7 +290,7 @@ describe('sortConfig', () => {
|
|||
},
|
||||
];
|
||||
|
||||
sortConfig(routes, baseURL);
|
||||
sortRoutes(routes, baseURL);
|
||||
|
||||
expect(routes).toMatchSnapshot();
|
||||
});
|
||||
|
|
|
@ -1,153 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import path from 'path';
|
||||
import _ from 'lodash';
|
||||
import {docuHash, generate} from '@docusaurus/utils';
|
||||
import {initPlugins} from './init';
|
||||
import {createBootstrapPlugin, createMDXFallbackPlugin} from './synthetic';
|
||||
import {localizePluginTranslationFile} from '../translations/translations';
|
||||
import {applyRouteTrailingSlash, sortConfig} from './routeConfig';
|
||||
import type {
|
||||
LoadContext,
|
||||
PluginContentLoadedActions,
|
||||
RouteConfig,
|
||||
AllContent,
|
||||
GlobalData,
|
||||
LoadedPlugin,
|
||||
InitializedPlugin,
|
||||
PluginRouteContext,
|
||||
} from '@docusaurus/types';
|
||||
|
||||
/**
|
||||
* Initializes the plugins, runs `loadContent`, `translateContent`,
|
||||
* `contentLoaded`, and `translateThemeConfig`. Because `contentLoaded` is
|
||||
* side-effect-ful (it generates temp files), so is this function. This function
|
||||
* would also mutate `context.siteConfig.themeConfig` to translate it.
|
||||
*/
|
||||
export async function loadPlugins(context: LoadContext): Promise<{
|
||||
plugins: LoadedPlugin[];
|
||||
pluginsRouteConfigs: RouteConfig[];
|
||||
globalData: GlobalData;
|
||||
}> {
|
||||
// 1. Plugin Lifecycle - Initialization/Constructor.
|
||||
const plugins: InitializedPlugin[] = await initPlugins(context);
|
||||
|
||||
plugins.push(
|
||||
createBootstrapPlugin(context),
|
||||
createMDXFallbackPlugin(context),
|
||||
);
|
||||
|
||||
// 2. Plugin Lifecycle - loadContent.
|
||||
// Currently plugins run lifecycle methods in parallel and are not
|
||||
// order-dependent. We could change this in future if there are plugins which
|
||||
// need to run in certain order or depend on others for data.
|
||||
// This would also translate theme config and content upfront, given the
|
||||
// translation files that the plugin declares.
|
||||
const loadedPlugins: LoadedPlugin[] = await Promise.all(
|
||||
plugins.map(async (plugin) => {
|
||||
const content = await plugin.loadContent?.();
|
||||
const rawTranslationFiles =
|
||||
(await plugin.getTranslationFiles?.({content})) ?? [];
|
||||
const translationFiles = await Promise.all(
|
||||
rawTranslationFiles.map((translationFile) =>
|
||||
localizePluginTranslationFile({
|
||||
localizationDir: context.localizationDir,
|
||||
translationFile,
|
||||
plugin,
|
||||
}),
|
||||
),
|
||||
);
|
||||
const translatedContent =
|
||||
plugin.translateContent?.({content, translationFiles}) ?? content;
|
||||
const translatedThemeConfigSlice = plugin.translateThemeConfig?.({
|
||||
themeConfig: context.siteConfig.themeConfig,
|
||||
translationFiles,
|
||||
});
|
||||
// Side-effect to merge theme config translations. A plugin should only
|
||||
// translate its own slice of theme config and should make no assumptions
|
||||
// about other plugins' keys, so this is safe to run in parallel.
|
||||
Object.assign(context.siteConfig.themeConfig, translatedThemeConfigSlice);
|
||||
return {...plugin, content: translatedContent};
|
||||
}),
|
||||
);
|
||||
|
||||
const allContent: AllContent = _.chain(loadedPlugins)
|
||||
.groupBy((item) => item.name)
|
||||
.mapValues((nameItems) =>
|
||||
_.chain(nameItems)
|
||||
.groupBy((item) => item.options.id)
|
||||
.mapValues((idItems) => idItems[0]!.content)
|
||||
.value(),
|
||||
)
|
||||
.value();
|
||||
|
||||
// 3. Plugin Lifecycle - contentLoaded.
|
||||
const pluginsRouteConfigs: RouteConfig[] = [];
|
||||
const globalData: GlobalData = {};
|
||||
|
||||
await Promise.all(
|
||||
loadedPlugins.map(async ({content, ...plugin}) => {
|
||||
if (!plugin.contentLoaded) {
|
||||
return;
|
||||
}
|
||||
const pluginId = plugin.options.id;
|
||||
// Plugins data files are namespaced by pluginName/pluginId
|
||||
const dataDir = path.join(
|
||||
context.generatedFilesDir,
|
||||
plugin.name,
|
||||
pluginId,
|
||||
);
|
||||
const pluginRouteContextModulePath = path.join(
|
||||
dataDir,
|
||||
`${docuHash('pluginRouteContextModule')}.json`,
|
||||
);
|
||||
const pluginRouteContext: PluginRouteContext['plugin'] = {
|
||||
name: plugin.name,
|
||||
id: pluginId,
|
||||
};
|
||||
await generate(
|
||||
'/',
|
||||
pluginRouteContextModulePath,
|
||||
JSON.stringify(pluginRouteContext, null, 2),
|
||||
);
|
||||
const actions: PluginContentLoadedActions = {
|
||||
addRoute(initialRouteConfig) {
|
||||
// Trailing slash behavior is handled generically for all plugins
|
||||
const finalRouteConfig = applyRouteTrailingSlash(
|
||||
initialRouteConfig,
|
||||
context.siteConfig,
|
||||
);
|
||||
pluginsRouteConfigs.push({
|
||||
...finalRouteConfig,
|
||||
context: {
|
||||
...(finalRouteConfig.context && {data: finalRouteConfig.context}),
|
||||
plugin: pluginRouteContextModulePath,
|
||||
},
|
||||
});
|
||||
},
|
||||
async createData(name, data) {
|
||||
const modulePath = path.join(dataDir, name);
|
||||
await generate(dataDir, name, data);
|
||||
return modulePath;
|
||||
},
|
||||
setGlobalData(data) {
|
||||
globalData[plugin.name] ??= {};
|
||||
globalData[plugin.name]![pluginId] = data;
|
||||
},
|
||||
};
|
||||
|
||||
await plugin.contentLoaded({content, actions, allContent});
|
||||
}),
|
||||
);
|
||||
|
||||
// Sort the route config. This ensures that routes with nested
|
||||
// routes are always placed last.
|
||||
sortConfig(pluginsRouteConfigs, context.siteConfig.baseUrl);
|
||||
|
||||
return {plugins: loadedPlugins, pluginsRouteConfigs, globalData};
|
||||
}
|
318
packages/docusaurus/src/server/plugins/plugins.ts
Normal file
|
@ -0,0 +1,318 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import path from 'path';
|
||||
import _ from 'lodash';
|
||||
import {docuHash, generate} from '@docusaurus/utils';
|
||||
import logger from '@docusaurus/logger';
|
||||
import {initPlugins} from './init';
|
||||
import {createBootstrapPlugin, createMDXFallbackPlugin} from './synthetic';
|
||||
import {localizePluginTranslationFile} from '../translations/translations';
|
||||
import {applyRouteTrailingSlash, sortRoutes} from './routeConfig';
|
||||
import {PerfLogger} from '../../utils';
|
||||
import type {
|
||||
LoadContext,
|
||||
PluginContentLoadedActions,
|
||||
RouteConfig,
|
||||
AllContent,
|
||||
GlobalData,
|
||||
LoadedPlugin,
|
||||
InitializedPlugin,
|
||||
PluginRouteContext,
|
||||
} from '@docusaurus/types';
|
||||
import type {PluginIdentifier} from '@docusaurus/types/src/plugin';
|
||||
|
||||
async function translatePlugin({
|
||||
plugin,
|
||||
context,
|
||||
}: {
|
||||
plugin: LoadedPlugin;
|
||||
context: LoadContext;
|
||||
}): Promise<LoadedPlugin> {
|
||||
const {content} = plugin;
|
||||
|
||||
const rawTranslationFiles =
|
||||
(await plugin.getTranslationFiles?.({content: plugin.content})) ?? [];
|
||||
|
||||
const translationFiles = await Promise.all(
|
||||
rawTranslationFiles.map((translationFile) =>
|
||||
localizePluginTranslationFile({
|
||||
localizationDir: context.localizationDir,
|
||||
translationFile,
|
||||
plugin,
|
||||
}),
|
||||
),
|
||||
);
|
||||
|
||||
const translatedContent =
|
||||
plugin.translateContent?.({content, translationFiles}) ?? content;
|
||||
|
||||
const translatedThemeConfigSlice = plugin.translateThemeConfig?.({
|
||||
themeConfig: context.siteConfig.themeConfig,
|
||||
translationFiles,
|
||||
});
|
||||
|
||||
// TODO dangerous legacy, needs to be refactored!
|
||||
// Side-effect to merge theme config translations. A plugin should only
|
||||
// translate its own slice of theme config and should make no assumptions
|
||||
// about other plugins' keys, so this is safe to run in parallel.
|
||||
Object.assign(context.siteConfig.themeConfig, translatedThemeConfigSlice);
|
||||
return {...plugin, content: translatedContent};
|
||||
}
|
||||
|
||||
async function executePluginLoadContent({
|
||||
plugin,
|
||||
context,
|
||||
}: {
|
||||
plugin: InitializedPlugin;
|
||||
context: LoadContext;
|
||||
}): Promise<LoadedPlugin> {
|
||||
return PerfLogger.async(
|
||||
`Plugin - loadContent - ${plugin.name}@${plugin.options.id}`,
|
||||
async () => {
|
||||
const content = await plugin.loadContent?.();
|
||||
const loadedPlugin: LoadedPlugin = {...plugin, content};
|
||||
return translatePlugin({plugin: loadedPlugin, context});
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
async function executePluginsLoadContent({
|
||||
plugins,
|
||||
context,
|
||||
}: {
|
||||
plugins: InitializedPlugin[];
|
||||
context: LoadContext;
|
||||
}) {
|
||||
return PerfLogger.async(`Plugins - loadContent`, () =>
|
||||
Promise.all(
|
||||
plugins.map((plugin) => executePluginLoadContent({plugin, context})),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
function aggregateAllContent(loadedPlugins: LoadedPlugin[]): AllContent {
|
||||
return _.chain(loadedPlugins)
|
||||
.groupBy((item) => item.name)
|
||||
.mapValues((nameItems) =>
|
||||
_.chain(nameItems)
|
||||
.groupBy((item) => item.options.id)
|
||||
.mapValues((idItems) => idItems[0]!.content)
|
||||
.value(),
|
||||
)
|
||||
.value();
|
||||
}
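For reference, the aggregated AllContent object ends up shaped like the sketch below; the plugin names, ids and content payloads are assumptions for illustration.

import type {AllContent} from '@docusaurus/types';

const exampleAllContent: AllContent = {
  'docusaurus-plugin-content-docs': {
    default: {loadedVersions: []}, // whatever the docs plugin's loadContent() returned
  },
  'docusaurus-plugin-content-blog': {
    default: {blogPosts: []},
  },
};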
|
||||
|
||||
// TODO refactor and make this side-effect-free
|
||||
// If the function was pure, we could more easily compare previous/next values
|
||||
// on site reloads, and bail out of the reload process earlier
|
||||
// createData() modules should rather be declarative
|
||||
async function executePluginContentLoaded({
|
||||
plugin,
|
||||
context,
|
||||
allContent,
|
||||
}: {
|
||||
plugin: LoadedPlugin;
|
||||
context: LoadContext;
|
||||
// TODO AllContent was injected into this lifecycle for the debug plugin.
// It is what allows the debug plugin to create debug routes for all the other plugins.
// This was likely a bad idea: it prevents contentLoaded() from starting
// until all plugins have finished loading all of their data.
// We'd rather remove this and find another way to implement the debug plugin.
// A possible solution: make it a core feature instead of a plugin?
|
||||
allContent: AllContent;
|
||||
}): Promise<{routes: RouteConfig[]; globalData: unknown}> {
|
||||
return PerfLogger.async(
|
||||
`Plugins - contentLoaded - ${plugin.name}@${plugin.options.id}`,
|
||||
async () => {
|
||||
if (!plugin.contentLoaded) {
|
||||
return {routes: [], globalData: undefined};
|
||||
}
|
||||
|
||||
const pluginId = plugin.options.id;
|
||||
// Plugins data files are namespaced by pluginName/pluginId
|
||||
const dataDir = path.join(
|
||||
context.generatedFilesDir,
|
||||
plugin.name,
|
||||
pluginId,
|
||||
);
|
||||
const pluginRouteContextModulePath = path.join(
|
||||
dataDir,
|
||||
`${docuHash('pluginRouteContextModule')}.json`,
|
||||
);
|
||||
const pluginRouteContext: PluginRouteContext['plugin'] = {
|
||||
name: plugin.name,
|
||||
id: pluginId,
|
||||
};
|
||||
await generate(
|
||||
'/',
|
||||
pluginRouteContextModulePath,
|
||||
JSON.stringify(pluginRouteContext, null, 2),
|
||||
);
|
||||
|
||||
const routes: RouteConfig[] = [];
|
||||
let globalData: unknown;
|
||||
|
||||
const actions: PluginContentLoadedActions = {
|
||||
addRoute(initialRouteConfig) {
|
||||
// Trailing slash behavior is handled generically for all plugins
|
||||
const finalRouteConfig = applyRouteTrailingSlash(
|
||||
initialRouteConfig,
|
||||
context.siteConfig,
|
||||
);
|
||||
routes.push({
|
||||
...finalRouteConfig,
|
||||
context: {
|
||||
...(finalRouteConfig.context && {data: finalRouteConfig.context}),
|
||||
plugin: pluginRouteContextModulePath,
|
||||
},
|
||||
});
|
||||
},
|
||||
async createData(name, data) {
|
||||
const modulePath = path.join(dataDir, name);
|
||||
await generate(dataDir, name, data);
|
||||
return modulePath;
|
||||
},
|
||||
setGlobalData(data) {
|
||||
globalData = data;
|
||||
},
|
||||
};
|
||||
|
||||
await plugin.contentLoaded({
|
||||
content: plugin.content,
|
||||
actions,
|
||||
allContent,
|
||||
});
|
||||
|
||||
return {routes, globalData};
|
||||
},
|
||||
);
|
||||
}
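As a reminder of the consumer side, here is a minimal plugin whose contentLoaded() exercises the actions wired up above; the plugin name, component path and data shapes are made up.

import type {Plugin} from '@docusaurus/types';

function myExamplePlugin(): Plugin<string[]> {
  return {
    name: 'my-example-plugin',
    async loadContent() {
      return ['hello', 'world'];
    },
    async contentLoaded({content, actions}) {
      // createData() writes a module under <generatedFilesDir>/my-example-plugin/default/
      const itemsPath = await actions.createData('items.json', JSON.stringify(content));
      // addRoute() pushes into the per-plugin `routes` array collected above
      actions.addRoute({
        path: '/example',
        component: '@site/src/components/ExamplePage', // assumed component
        exact: true,
        modules: {items: itemsPath},
      });
      // setGlobalData() ends up as globalData['my-example-plugin'].default
      actions.setGlobalData({itemCount: content.length});
    },
  };
}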
|
||||
|
||||
async function executePluginsContentLoaded({
|
||||
plugins,
|
||||
context,
|
||||
}: {
|
||||
plugins: LoadedPlugin[];
|
||||
context: LoadContext;
|
||||
}): Promise<{routes: RouteConfig[]; globalData: GlobalData}> {
|
||||
return PerfLogger.async(`Plugins - contentLoaded`, async () => {
|
||||
const allContent = aggregateAllContent(plugins);
|
||||
|
||||
const routes: RouteConfig[] = [];
|
||||
const globalData: GlobalData = {};
|
||||
|
||||
await Promise.all(
|
||||
plugins.map(async (plugin) => {
|
||||
const {routes: pluginRoutes, globalData: pluginGlobalData} =
|
||||
await executePluginContentLoaded({
|
||||
plugin,
|
||||
context,
|
||||
allContent,
|
||||
});
|
||||
|
||||
routes.push(...pluginRoutes);
|
||||
|
||||
if (pluginGlobalData !== undefined) {
|
||||
globalData[plugin.name] ??= {};
|
||||
globalData[plugin.name]![plugin.options.id] = pluginGlobalData;
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
// Sort the route config.
|
||||
// This ensures that routes with subroutes are always placed last.
|
||||
sortRoutes(routes, context.siteConfig.baseUrl);
|
||||
|
||||
return {routes, globalData};
|
||||
});
|
||||
}
|
||||
|
||||
export type LoadPluginsResult = {
|
||||
plugins: LoadedPlugin[];
|
||||
routes: RouteConfig[];
|
||||
globalData: GlobalData;
|
||||
};
|
||||
|
||||
/**
|
||||
* Initializes the plugins, runs `loadContent`, `translateContent`,
|
||||
* `contentLoaded`, and `translateThemeConfig`. Because `contentLoaded` is
|
||||
* side-effect-ful (it generates temp files), so is this function. This function
|
||||
* would also mutate `context.siteConfig.themeConfig` to translate it.
|
||||
*/
|
||||
export async function loadPlugins(
|
||||
context: LoadContext,
|
||||
): Promise<LoadPluginsResult> {
|
||||
return PerfLogger.async('Plugins - loadPlugins', async () => {
|
||||
// 1. Plugin Lifecycle - Initialization/Constructor.
|
||||
const plugins: InitializedPlugin[] = await PerfLogger.async(
|
||||
'Plugins - initPlugins',
|
||||
() => initPlugins(context),
|
||||
);
|
||||
|
||||
plugins.push(
|
||||
createBootstrapPlugin(context),
|
||||
createMDXFallbackPlugin(context),
|
||||
);
|
||||
|
||||
// 2. Plugin Lifecycle - loadContent.
|
||||
const loadedPlugins = await executePluginsLoadContent({plugins, context});
|
||||
|
||||
// 3. Plugin Lifecycle - contentLoaded.
|
||||
const {routes, globalData} = await executePluginsContentLoaded({
|
||||
plugins: loadedPlugins,
|
||||
context,
|
||||
});
|
||||
|
||||
return {plugins: loadedPlugins, routes, globalData};
|
||||
});
|
||||
}
|
||||
|
||||
export function getPluginByIdentifier({
|
||||
plugins,
|
||||
pluginIdentifier,
|
||||
}: {
|
||||
pluginIdentifier: PluginIdentifier;
|
||||
plugins: LoadedPlugin[];
|
||||
}): LoadedPlugin {
|
||||
const plugin = plugins.find(
|
||||
(p) =>
|
||||
p.name === pluginIdentifier.name && p.options.id === pluginIdentifier.id,
|
||||
);
|
||||
if (!plugin) {
|
||||
throw new Error(
|
||||
logger.interpolate`Plugin not found for identifier ${pluginIdentifier.name}@${pluginIdentifier.id}`,
|
||||
);
|
||||
}
|
||||
return plugin;
|
||||
}
|
||||
|
||||
export async function reloadPlugin({
|
||||
pluginIdentifier,
|
||||
plugins,
|
||||
context,
|
||||
}: {
|
||||
pluginIdentifier: PluginIdentifier;
|
||||
plugins: LoadedPlugin[];
|
||||
context: LoadContext;
|
||||
}): Promise<LoadPluginsResult> {
|
||||
return PerfLogger.async('Plugins - reloadPlugin', async () => {
|
||||
const plugin = getPluginByIdentifier({plugins, pluginIdentifier});
|
||||
|
||||
const reloadedPlugin = await executePluginLoadContent({plugin, context});
|
||||
const newPlugins = plugins.with(plugins.indexOf(plugin), reloadedPlugin);
|
||||
|
||||
// Unfortunately, due to the "AllContent" data we have to re-execute this
|
||||
// for all plugins, not just the one to reload...
|
||||
const {routes, globalData} = await executePluginsContentLoaded({
|
||||
plugins: newPlugins,
|
||||
context,
|
||||
});
|
||||
|
||||
return {plugins: newPlugins, routes, globalData};
|
||||
});
|
||||
}
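A sketch of the fine-grained reload flow this function enables, e.g. from a dev-server file watcher; the plugin identifier is assumed to be the default docs plugin instance.

import type {LoadContext, LoadedPlugin} from '@docusaurus/types';
import {reloadPlugin} from './plugins';

async function onDocsFileChanged(context: LoadContext, plugins: LoadedPlugin[]) {
  const {plugins: newPlugins, routes, globalData} = await reloadPlugin({
    context,
    plugins,
    pluginIdentifier: {name: 'docusaurus-plugin-content-docs', id: 'default'},
  });
  // Only the docs plugin re-runs loadContent(); contentLoaded() still re-runs
  // for every plugin because of the AllContent coupling noted above.
  return {plugins: newPlugins, routes, globalData};
}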
|
|
@ -27,7 +27,7 @@ export function applyRouteTrailingSlash(
|
|||
};
|
||||
}
|
||||
|
||||
export function sortConfig(
|
||||
export function sortRoutes(
|
||||
routeConfigs: RouteConfig[],
|
||||
baseUrl: string = '/',
|
||||
): void {
|
||||
|
@ -64,7 +64,7 @@ export function sortConfig(
|
|||
|
||||
routeConfigs.forEach((routeConfig) => {
|
||||
if (routeConfig.routes) {
|
||||
sortConfig(routeConfig.routes, baseUrl);
|
||||
sortRoutes(routeConfig.routes, baseUrl);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
|
|
@ -5,210 +5,26 @@
|
|||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import query from 'querystring';
|
||||
import _ from 'lodash';
|
||||
import logger from '@docusaurus/logger';
|
||||
import {
|
||||
docuHash,
|
||||
normalizeUrl,
|
||||
simpleHash,
|
||||
escapePath,
|
||||
} from '@docusaurus/utils';
|
||||
import {getAllFinalRoutes} from './utils';
|
||||
import type {
|
||||
Module,
|
||||
RouteConfig,
|
||||
RouteModules,
|
||||
ChunkNames,
|
||||
RouteChunkNames,
|
||||
ReportingSeverity,
|
||||
} from '@docusaurus/types';
|
||||
import {normalizeUrl} from '@docusaurus/utils';
|
||||
import type {RouteConfig, ReportingSeverity} from '@docusaurus/types';
|
||||
|
||||
type LoadedRoutes = {
|
||||
/** Serialized routes config that can be directly emitted into temp file. */
|
||||
routesConfig: string;
|
||||
/** @see {ChunkNames} */
|
||||
routesChunkNames: RouteChunkNames;
|
||||
/**
|
||||
* A map from chunk name to module paths. Module paths would have backslash
|
||||
* escaped already, so they can be directly printed.
|
||||
*/
|
||||
registry: {
|
||||
[chunkName: string]: string;
|
||||
};
|
||||
/**
|
||||
* Collect all page paths for injecting it later in the plugin lifecycle.
|
||||
* This is useful for plugins like sitemaps, redirects etc... Only collects
|
||||
* "actual" pages, i.e. those without subroutes, because if a route has
|
||||
* subroutes, it is probably a wrapper.
|
||||
*/
|
||||
routesPaths: string[];
|
||||
};
|
||||
|
||||
/** Indents every line of `str` by one level. */
|
||||
function indent(str: string) {
|
||||
return ` ${str.replace(/\n/g, `\n `)}`;
|
||||
// Recursively get the final routes (routes with no subroutes)
|
||||
export function getAllFinalRoutes(routeConfig: RouteConfig[]): RouteConfig[] {
|
||||
function getFinalRoutes(route: RouteConfig): RouteConfig[] {
|
||||
return route.routes ? route.routes.flatMap(getFinalRoutes) : [route];
|
||||
}
|
||||
|
||||
const chunkNameCache = new Map<string, string>();
|
||||
const chunkNameCount = new Map<string, number>();
|
||||
|
||||
/**
|
||||
* Generates a unique chunk name that can be used in the chunk registry.
|
||||
*
|
||||
* @param modulePath A path to generate chunk name from. The actual value has no
|
||||
* semantic significance.
|
||||
* @param prefix A prefix to append to the chunk name, to avoid name clash.
|
||||
* @param preferredName Chunk names default to `modulePath`, and this can supply
|
||||
* a more human-readable name.
|
||||
* @param shortId When `true`, the chunk name would only be a hash without any
|
||||
* other characters. Useful for bundle size. Defaults to `true` in production.
|
||||
*/
|
||||
export function genChunkName(
|
||||
modulePath: string,
|
||||
prefix?: string,
|
||||
preferredName?: string,
|
||||
shortId: boolean = process.env.NODE_ENV === 'production',
|
||||
): string {
|
||||
let chunkName = chunkNameCache.get(modulePath);
|
||||
if (!chunkName) {
|
||||
if (shortId) {
|
||||
chunkName = simpleHash(modulePath, 8);
|
||||
} else {
|
||||
let str = modulePath;
|
||||
if (preferredName) {
|
||||
const shortHash = simpleHash(modulePath, 3);
|
||||
str = `${preferredName}${shortHash}`;
|
||||
}
|
||||
const name = docuHash(str);
|
||||
chunkName = prefix ? `${prefix}---${name}` : name;
|
||||
}
|
||||
const seenCount = (chunkNameCount.get(chunkName) ?? 0) + 1;
|
||||
if (seenCount > 1) {
|
||||
chunkName += seenCount.toString(36);
|
||||
}
|
||||
chunkNameCache.set(modulePath, chunkName);
|
||||
chunkNameCount.set(chunkName, seenCount);
|
||||
}
|
||||
return chunkName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes a piece of route config, and serializes it into raw JS code. The shape
|
||||
* is the same as react-router's `RouteConfig`. Formatting is similar to
|
||||
* `JSON.stringify` but without all the quotes.
|
||||
*/
|
||||
function serializeRouteConfig({
|
||||
routePath,
|
||||
routeHash,
|
||||
exact,
|
||||
subroutesCodeStrings,
|
||||
props,
|
||||
}: {
|
||||
routePath: string;
|
||||
routeHash: string;
|
||||
exact?: boolean;
|
||||
subroutesCodeStrings?: string[];
|
||||
props: {[propName: string]: unknown};
|
||||
}) {
|
||||
const parts = [
|
||||
`path: '${routePath}'`,
|
||||
`component: ComponentCreator('${routePath}', '${routeHash}')`,
|
||||
];
|
||||
|
||||
if (exact) {
|
||||
parts.push(`exact: true`);
|
||||
}
|
||||
|
||||
if (subroutesCodeStrings) {
|
||||
parts.push(
|
||||
`routes: [
|
||||
${indent(subroutesCodeStrings.join(',\n'))}
|
||||
]`,
|
||||
);
|
||||
}
|
||||
|
||||
Object.entries(props).forEach(([propName, propValue]) => {
|
||||
const isIdentifier =
|
||||
/^[$_\p{ID_Start}][$\u200c\u200d\p{ID_Continue}]*$/u.test(propName);
|
||||
const key = isIdentifier ? propName : JSON.stringify(propName);
|
||||
parts.push(`${key}: ${JSON.stringify(propValue)}`);
|
||||
});
|
||||
|
||||
return `{
|
||||
${indent(parts.join(',\n'))}
|
||||
}`;
|
||||
}
|
||||
|
||||
const isModule = (value: unknown): value is Module =>
|
||||
typeof value === 'string' ||
|
||||
(typeof value === 'object' &&
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
!!(value as {[key: string]: unknown} | null)?.__import);
|
||||
|
||||
/**
|
||||
* Takes a {@link Module} (which is nothing more than a path plus some metadata
|
||||
* like query) and returns the string path it represents.
|
||||
*/
|
||||
function getModulePath(target: Module): string {
|
||||
if (typeof target === 'string') {
|
||||
return target;
|
||||
}
|
||||
const queryStr = target.query ? `?${query.stringify(target.query)}` : '';
|
||||
return `${target.path}${queryStr}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes a route module (which is a tree of modules), and transforms each module
|
||||
* into a chunk name. It also mutates `res.registry` and registers the loaders
|
||||
* for each chunk.
|
||||
*
|
||||
* @param routeModule One route module to be transformed.
|
||||
* @param prefix Prefix passed to {@link genChunkName}.
|
||||
* @param name Preferred name passed to {@link genChunkName}.
|
||||
* @param res The route structures being loaded.
|
||||
*/
|
||||
function genChunkNames(
|
||||
routeModule: RouteModules,
|
||||
prefix: string,
|
||||
name: string,
|
||||
res: LoadedRoutes,
|
||||
): ChunkNames;
|
||||
function genChunkNames(
|
||||
routeModule: RouteModules | RouteModules[] | Module,
|
||||
prefix: string,
|
||||
name: string,
|
||||
res: LoadedRoutes,
|
||||
): ChunkNames | ChunkNames[] | string;
|
||||
function genChunkNames(
|
||||
routeModule: RouteModules | RouteModules[] | Module,
|
||||
prefix: string,
|
||||
name: string,
|
||||
res: LoadedRoutes,
|
||||
): string | ChunkNames | ChunkNames[] {
|
||||
if (isModule(routeModule)) {
|
||||
// This is a leaf node, no need to recurse
|
||||
const modulePath = getModulePath(routeModule);
|
||||
const chunkName = genChunkName(modulePath, prefix, name);
|
||||
res.registry[chunkName] = escapePath(modulePath);
|
||||
return chunkName;
|
||||
}
|
||||
if (Array.isArray(routeModule)) {
|
||||
return routeModule.map((val, index) =>
|
||||
genChunkNames(val, `${index}`, name, res),
|
||||
);
|
||||
}
|
||||
return _.mapValues(routeModule, (v, key) => genChunkNames(v, key, name, res));
|
||||
return routeConfig.flatMap(getFinalRoutes);
|
||||
}
|
||||
|
||||
export function handleDuplicateRoutes(
|
||||
pluginsRouteConfigs: RouteConfig[],
|
||||
routes: RouteConfig[],
|
||||
onDuplicateRoutes: ReportingSeverity,
|
||||
): void {
|
||||
if (onDuplicateRoutes === 'ignore') {
|
||||
return;
|
||||
}
|
||||
const allRoutes: string[] = getAllFinalRoutes(pluginsRouteConfigs).map(
|
||||
const allRoutes: string[] = getAllFinalRoutes(routes).map(
|
||||
(routeConfig) => routeConfig.path,
|
||||
);
|
||||
const seenRoutes = new Set<string>();
|
||||
|
@ -230,52 +46,6 @@ This could lead to non-deterministic routing behavior.`;
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This is the higher level overview of route code generation. For each route
|
||||
* config node, it returns the node's serialized form, and mutates `registry`,
|
||||
* `routesPaths`, and `routesChunkNames` accordingly.
|
||||
*/
|
||||
function genRouteCode(routeConfig: RouteConfig, res: LoadedRoutes): string {
|
||||
const {
|
||||
path: routePath,
|
||||
component,
|
||||
modules = {},
|
||||
context,
|
||||
routes: subroutes,
|
||||
priority,
|
||||
exact,
|
||||
...props
|
||||
} = routeConfig;
|
||||
|
||||
if (typeof routePath !== 'string' || !component) {
|
||||
throw new Error(
|
||||
`Invalid route config: path must be a string and component is required.
|
||||
${JSON.stringify(routeConfig)}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (!subroutes) {
|
||||
res.routesPaths.push(routePath);
|
||||
}
|
||||
|
||||
const routeHash = simpleHash(JSON.stringify(routeConfig), 3);
|
||||
res.routesChunkNames[`${routePath}-${routeHash}`] = {
|
||||
// Avoid clash with a prop called "component"
|
||||
...genChunkNames({__comp: component}, 'component', component, res),
|
||||
...(context &&
|
||||
genChunkNames({__context: context}, 'context', routePath, res)),
|
||||
...genChunkNames(modules, 'module', routePath, res),
|
||||
};
|
||||
|
||||
return serializeRouteConfig({
|
||||
routePath: routePath.replace(/'/g, "\\'"),
|
||||
routeHash,
|
||||
subroutesCodeStrings: subroutes?.map((r) => genRouteCode(r, res)),
|
||||
exact,
|
||||
props,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Old stuff
|
||||
* As far as I understand, this is what allows the 404.html file to be statically generated
|
||||
|
@ -285,45 +55,12 @@ ${JSON.stringify(routeConfig)}`,
|
|||
*/
|
||||
const NotFoundRoutePath = '/404.html';
|
||||
|
||||
/**
|
||||
* Routes are prepared into three temp files:
|
||||
*
|
||||
* - `routesConfig`, the route config passed to react-router. This file is kept
|
||||
* minimal, because it can't be code-splitted.
|
||||
* - `routesChunkNames`, a mapping from route paths (hashed) to code-splitted
|
||||
* chunk names.
|
||||
* - `registry`, a mapping from chunk names to options for react-loadable.
|
||||
*/
|
||||
export function loadRoutes(
|
||||
export function getRoutesPaths(
|
||||
routeConfigs: RouteConfig[],
|
||||
baseUrl: string,
|
||||
onDuplicateRoutes: ReportingSeverity,
|
||||
): LoadedRoutes {
|
||||
handleDuplicateRoutes(routeConfigs, onDuplicateRoutes);
|
||||
const res: LoadedRoutes = {
|
||||
// To be written by `genRouteCode`
|
||||
routesConfig: '',
|
||||
routesChunkNames: {},
|
||||
registry: {},
|
||||
routesPaths: [normalizeUrl([baseUrl, NotFoundRoutePath])],
|
||||
};
|
||||
|
||||
// `genRouteCode` would mutate `res`
|
||||
const routeConfigSerialized = routeConfigs
|
||||
.map((r) => genRouteCode(r, res))
|
||||
.join(',\n');
|
||||
|
||||
res.routesConfig = `import React from 'react';
|
||||
import ComponentCreator from '@docusaurus/ComponentCreator';
|
||||
|
||||
export default [
|
||||
${indent(routeConfigSerialized)},
|
||||
{
|
||||
path: '*',
|
||||
component: ComponentCreator('*'),
|
||||
},
|
||||
): string[] {
|
||||
return [
|
||||
normalizeUrl([baseUrl, NotFoundRoutePath]),
|
||||
...getAllFinalRoutes(routeConfigs).map((r) => r.path),
|
||||
];
|
||||
`;
|
||||
|
||||
return res;
|
||||
}
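A quick sketch of getRoutesPaths behavior; the routes are illustrative, and the empty component strings mirror the test fixtures above.

import {getRoutesPaths} from './routes';
import type {RouteConfig} from '@docusaurus/types';

const routes: RouteConfig[] = [
  {
    path: '/docs',
    component: '',
    routes: [{path: '/docs/intro', component: '', exact: true}],
  },
  {path: '/about', component: '', exact: true},
];

const paths = getRoutesPaths(routes, '/');
// => ['/404.html', '/docs/intro', '/about']
// Wrapper routes with subroutes (here '/docs') are skipped; only final routes
// are collected, and the 404 path is always prepended.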
|
||||
|
|
276
packages/docusaurus/src/server/site.ts
Normal file
|
@ -0,0 +1,276 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import path from 'path';
|
||||
import {
|
||||
localizePath,
|
||||
DEFAULT_BUILD_DIR_NAME,
|
||||
GENERATED_FILES_DIR_NAME,
|
||||
} from '@docusaurus/utils';
|
||||
import combinePromises from 'combine-promises';
|
||||
import {loadSiteConfig} from './config';
|
||||
import {loadClientModules} from './clientModules';
|
||||
import {loadPlugins, reloadPlugin} from './plugins/plugins';
|
||||
import {loadHtmlTags} from './htmlTags';
|
||||
import {loadSiteMetadata} from './siteMetadata';
|
||||
import {loadI18n} from './i18n';
|
||||
import {
|
||||
loadSiteCodeTranslations,
|
||||
getPluginsDefaultCodeTranslationMessages,
|
||||
} from './translations/translations';
|
||||
import {PerfLogger} from '../utils';
|
||||
import {generateSiteFiles} from './codegen/codegen';
|
||||
import {getRoutesPaths, handleDuplicateRoutes} from './routes';
|
||||
import type {LoadPluginsResult} from './plugins/plugins';
|
||||
import type {
|
||||
DocusaurusConfig,
|
||||
GlobalData,
|
||||
LoadContext,
|
||||
Props,
|
||||
} from '@docusaurus/types';
|
||||
import type {PluginIdentifier} from '@docusaurus/types/src/plugin';
|
||||
|
||||
export type LoadContextParams = {
|
||||
/** Usually the CWD; can be overridden with command argument. */
|
||||
siteDir: string;
|
||||
/** Custom output directory. Can be customized with `--out-dir` option */
|
||||
outDir?: string;
|
||||
/** Custom config path. Can be customized with `--config` option */
|
||||
config?: string;
|
||||
/** Default is `i18n.defaultLocale` */
|
||||
locale?: string;
|
||||
/**
|
||||
* `true` means the paths will have the locale prepended; `false` means they
|
||||
* won't (useful for `yarn build -l zh-Hans` where the output should be
|
||||
* emitted into `build/` instead of `build/zh-Hans/`); `undefined` is like the
|
||||
* "smart" option where only non-default locale paths are localized
|
||||
*/
|
||||
localizePath?: boolean;
|
||||
};
|
||||
|
||||
export type LoadSiteParams = LoadContextParams;
|
||||
|
||||
export type Site = {
|
||||
props: Props;
|
||||
params: LoadSiteParams;
|
||||
};
|
||||
|
/**
 * Loading context is the very first step in site building. Its params are
 * directly acquired from CLI options. It mainly loads `siteConfig` and the i18n
 * context (which includes code translations). The `LoadContext` will be passed
 * to plugin constructors.
 */
export async function loadContext(
  params: LoadContextParams,
): Promise<LoadContext> {
  const {
    siteDir,
    outDir: baseOutDir = DEFAULT_BUILD_DIR_NAME,
    locale,
    config: customConfigFilePath,
  } = params;
  const generatedFilesDir = path.resolve(siteDir, GENERATED_FILES_DIR_NAME);

  const {siteConfig: initialSiteConfig, siteConfigPath} = await loadSiteConfig({
    siteDir,
    customConfigFilePath,
  });

  const i18n = await loadI18n(initialSiteConfig, {locale});

  const baseUrl = localizePath({
    path: initialSiteConfig.baseUrl,
    i18n,
    options: params,
    pathType: 'url',
  });
  const outDir = localizePath({
    path: path.resolve(siteDir, baseOutDir),
    i18n,
    options: params,
    pathType: 'fs',
  });
  const localizationDir = path.resolve(
    siteDir,
    i18n.path,
    i18n.localeConfigs[i18n.currentLocale]!.path,
  );

  const siteConfig: DocusaurusConfig = {...initialSiteConfig, baseUrl};

  const codeTranslations = await loadSiteCodeTranslations({localizationDir});

  return {
    siteDir,
    generatedFilesDir,
    localizationDir,
    siteConfig,
    siteConfigPath,
    outDir,
    baseUrl,
    i18n,
    codeTranslations,
  };
}
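A minimal sketch of how `loadContext` might be consumed by a command that only needs the resolved config and i18n context; the call site and values are hypothetical:

import {loadContext} from './site';

// Hypothetical call site: resolve config + i18n without initializing plugins.
const context = await loadContext({siteDir: process.cwd(), locale: 'fr'});
console.log(context.baseUrl); // e.g. '/fr/' with the default "smart" localizePath behavior
console.log(context.outDir);  // e.g. '<siteDir>/build/fr'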
async function createSiteProps(
  params: LoadPluginsResult & {context: LoadContext},
): Promise<Props> {
  const {plugins, routes, context} = params;
  const {
    generatedFilesDir,
    siteDir,
    siteConfig,
    siteConfigPath,
    outDir,
    baseUrl,
    i18n,
    localizationDir,
    codeTranslations: siteCodeTranslations,
  } = context;

  const {headTags, preBodyTags, postBodyTags} = loadHtmlTags(plugins);

  const {codeTranslations, siteMetadata} = await combinePromises({
    // TODO code translations should be loaded as part of LoadedPlugin?
    codeTranslations: PerfLogger.async(
      'Load - loadCodeTranslations',
      async () => ({
        ...(await getPluginsDefaultCodeTranslationMessages(plugins)),
        ...siteCodeTranslations,
      }),
    ),
    siteMetadata: PerfLogger.async('Load - loadSiteMetadata', () =>
      loadSiteMetadata({plugins, siteDir}),
    ),
  });

  handleDuplicateRoutes(routes, siteConfig.onDuplicateRoutes);
  const routesPaths = getRoutesPaths(routes, baseUrl);

  return {
    siteConfig,
    siteConfigPath,
    siteMetadata,
    siteDir,
    outDir,
    baseUrl,
    i18n,
    localizationDir,
    generatedFilesDir,
    routes,
    routesPaths,
    plugins,
    headTags,
    preBodyTags,
    postBodyTags,
    codeTranslations,
  };
}
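Note that `combine-promises` resolves an object of promises concurrently and returns an object of resolved values, which is why `codeTranslations` and `siteMetadata` above are loaded in parallel. A tiny sketch of that behavior:

import combinePromises from 'combine-promises';

// Both values resolve concurrently; the result is {a: 1, b: 'two'}.
const {a, b} = await combinePromises({
  a: Promise.resolve(1),
  b: Promise.resolve('two'),
});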
// TODO global data should be part of site props?
async function createSiteFiles({
  site,
  globalData,
}: {
  site: Site;
  globalData: GlobalData;
}) {
  return PerfLogger.async('Load - createSiteFiles', async () => {
    const {
      props: {
        plugins,
        generatedFilesDir,
        siteConfig,
        siteMetadata,
        i18n,
        codeTranslations,
        routes,
        baseUrl,
      },
    } = site;
    const clientModules = loadClientModules(plugins);
    await generateSiteFiles({
      generatedFilesDir,
      clientModules,
      siteConfig,
      siteMetadata,
      i18n,
      codeTranslations,
      globalData,
      routes,
      baseUrl,
    });
  });
}
/**
 * This is the crux of the Docusaurus server-side. It reads everything it needs—
 * code translations, config file, plugin modules... Plugins then use their
 * lifecycles to generate content and other data. It is side-effect-ful because
 * it generates temp files in the `.docusaurus` folder for the bundler.
 */
export async function loadSite(params: LoadContextParams): Promise<Site> {
  PerfLogger.start('Load - loadContext');
  const context = await loadContext(params);
  PerfLogger.end('Load - loadContext');

  PerfLogger.start('Load - loadPlugins');
  const {plugins, routes, globalData} = await loadPlugins(context);
  PerfLogger.end('Load - loadPlugins');

  const props = await createSiteProps({plugins, routes, globalData, context});

  const site: Site = {props, params};

  await createSiteFiles({
    site,
    globalData,
  });

  return site;
}
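A condensed sketch of how a CLI command might drive `loadSite`; the surrounding build logic is omitted and the call site is hypothetical:

import {loadSite} from './server/site';

// Hypothetical: load the site once, then hand props over to the bundler / SSG step.
const site = await loadSite({siteDir: process.cwd(), outDir: 'build'});
const {plugins, routesPaths} = site.props;
console.log(`Loaded ${plugins.length} plugins and ${routesPaths.length} route paths`);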
export async function reloadSite(site: Site): Promise<Site> {
  // TODO this can be optimized, for example:
  // - plugins loading same data as before should not recreate routes/bundles
  // - codegen does not need to re-run if nothing changed
  return loadSite(site.params);
}
export async function reloadSitePlugin(
  site: Site,
  pluginIdentifier: PluginIdentifier,
): Promise<Site> {
  console.log(
    `reloadSitePlugin ${pluginIdentifier.name}@${pluginIdentifier.id}`,
  );

  const {plugins, routes, globalData} = await reloadPlugin({
    pluginIdentifier,
    plugins: site.props.plugins,
    context: site.props,
  });

  const newProps = await createSiteProps({
    plugins,
    routes,
    globalData,
    context: site.props, // Props extends Context
  });

  const newSite: Site = {
    props: newProps,
    params: site.params,
  };

  // TODO optimize, bypass useless codegen if new site is similar to old site
  await createSiteFiles({site: newSite, globalData});

  return newSite;
}
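This is the entry point for the fine-grained reloads mentioned in the PR title: when a watcher reports that files belonging to a single plugin changed, only that plugin is reloaded instead of the whole site. A hypothetical dev-server sketch, with made-up wiring that is not part of this diff:

import {loadSite, reloadSitePlugin} from './site';
import type {PluginIdentifier} from '@docusaurus/types/src/plugin';

// Hypothetical watcher integration.
let site = await loadSite({siteDir: process.cwd()});

async function onPluginFilesChanged(pluginIdentifier: PluginIdentifier) {
  // Only the affected plugin re-runs its lifecycles; the rest of the site is reused.
  site = await reloadSitePlugin(site, pluginIdentifier);
  // ...then notify the bundler / trigger a browser reload.
}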
@ -279,3 +279,15 @@ Please report this Docusaurus issue. name=${unusedDefaultCodeMessages}`;
    }),
  );
}

export async function loadSiteCodeTranslations({
  localizationDir,
}: {
  localizationDir: string;
}): Promise<CodeTranslations> {
  const codeTranslationFileContent =
    (await readCodeTranslationFileContent({localizationDir})) ?? {};

  // We only need key->message for code translations
  return _.mapValues(codeTranslationFileContent, (value) => value.message);
}
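For context, the file read by `readCodeTranslationFileContent` is the locale's `code.json`, which maps translation keys to objects carrying a `message` (and typically a `description`); only the messages are kept. A small hypothetical example:

// Hypothetical i18n/fr/code.json content:
//   {"theme.NotFound.title": {"message": "Page introuvable", "description": "The 404 page title"}}
const messages = await loadSiteCodeTranslations({localizationDir: 'i18n/fr'});
// => {'theme.NotFound.title': 'Page introuvable'}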
@ -7,15 +7,6 @@

import path from 'path';
import {posixPath, Globby} from '@docusaurus/utils';
import type {RouteConfig} from '@docusaurus/types';

// Recursively get the final routes (routes with no subroutes)
export function getAllFinalRoutes(routeConfig: RouteConfig[]): RouteConfig[] {
  function getFinalRoutes(route: RouteConfig): RouteConfig[] {
    return route.routes ? route.routes.flatMap(getFinalRoutes) : [route];
  }
  return routeConfig.flatMap(getFinalRoutes);
}

// Globby that fixes Windows path patterns
// See https://github.com/facebook/docusaurus/pull/4222#issuecomment-795517329
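To illustrate what `getAllFinalRoutes` returns, here is a made-up route tree (paths and components are purely illustrative):

import type {RouteConfig} from '@docusaurus/types';

// Made-up route tree: a parent route with children, plus a standalone leaf.
const routes: RouteConfig[] = [
  {
    path: '/docs',
    component: '@theme/DocRoot',
    routes: [
      {path: '/docs/intro', component: '@theme/DocItem', exact: true},
      {path: '/docs/api', component: '@theme/DocItem', exact: true},
    ],
  },
  {path: '/blog', component: '@theme/BlogListPage', exact: true},
];

// Only leaf routes (routes without subroutes) are returned:
getAllFinalRoutes(routes).map((r) => r.path);
// => ['/docs/intro', '/docs/api', '/blog']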
@ -15,6 +15,10 @@ type PerfLoggerAPI = {
  start: (label: string) => void;
  end: (label: string) => void;
  log: (message: string) => void;
  async: <Result>(
    label: string,
    asyncFn: () => Result | Promise<Result>,
  ) => Promise<Result>;
};

function createPerfLogger(): PerfLoggerAPI {

@ -24,14 +28,31 @@ function createPerfLogger(): PerfLoggerAPI {
      start: noop,
      end: noop,
      log: noop,
      async: async (_label, asyncFn) => asyncFn(),
    };
  }

  const prefix = logger.yellow(`[PERF] `);

  const start: PerfLoggerAPI['start'] = (label) => console.time(prefix + label);

  const end: PerfLoggerAPI['end'] = (label) => console.timeEnd(prefix + label);

  const log: PerfLoggerAPI['log'] = (label: string) =>
    console.log(prefix + label);

  const async: PerfLoggerAPI['async'] = async (label, asyncFn) => {
    start(label);
    const result = await asyncFn();
    end(label);
    return result;
  };

  return {
    start: (label) => console.time(prefix + label),
    end: (label) => console.timeEnd(prefix + label),
    log: (label) => console.log(prefix + label),
    start,
    end,
    log,
    async,
  };
}
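A short usage sketch of the new `async` helper; the label and the wrapped work are hypothetical:

// Brackets the awaited work with console.time / console.timeEnd under the
// "[PERF] Load - example" label and forwards the result.
const result = await PerfLogger.async('Load - example', async () =>
  doExpensiveWork(), // hypothetical placeholder function
);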
@ -12,26 +12,42 @@ import {loadSetup} from '../../server/__tests__/testUtils';

describe('webpack dev config', () => {
  it('simple start', async () => {
    const props = await loadSetup('simple-site');
    const {clientConfig} = await createStartClientConfig({props});
    const {props} = await loadSetup('simple-site');
    const {clientConfig} = await createStartClientConfig({
      props,
      minify: false,
      poll: false,
    });
    webpack.validate(clientConfig);
  });

  it('simple build', async () => {
    const props = await loadSetup('simple-site');
    const {config} = await createBuildClientConfig({props});
    const {props} = await loadSetup('simple-site');
    const {config} = await createBuildClientConfig({
      props,
      minify: false,
      bundleAnalyzer: false,
    });
    webpack.validate(config);
  });

  it('custom start', async () => {
    const props = await loadSetup('custom-site');
    const {clientConfig} = await createStartClientConfig({props});
    const {props} = await loadSetup('custom-site');
    const {clientConfig} = await createStartClientConfig({
      props,
      minify: false,
      poll: false,
    });
    webpack.validate(clientConfig);
  });

  it('custom build', async () => {
    const props = await loadSetup('custom-site');
    const {config} = await createBuildClientConfig({props});
    const {props} = await loadSetup('custom-site');
    const {config} = await createBuildClientConfig({
      props,
      minify: false,
      bundleAnalyzer: false,
    });
    webpack.validate(config);
  });
});
@ -14,7 +14,7 @@ import {loadSetup} from '../../server/__tests__/testUtils';
describe('webpack production config', () => {
  it('simple', async () => {
    jest.spyOn(console, 'log').mockImplementation(() => {});
    const props = await loadSetup('simple-site');
    const {props} = await loadSetup('simple-site');
    const {config} = await createServerConfig({
      props,
    });

@ -23,7 +23,7 @@ describe('webpack production config', () => {
  it('custom', async () => {
    jest.spyOn(console, 'log').mockImplementation(() => {});
    const props = await loadSetup('custom-site');
    const {props} = await loadSetup('custom-site');
    const {config} = await createServerConfig({
      props,
    });
@ -47,6 +47,8 @@ changefreq
Chedeau
chedeau
Clément
Codegen
codegen
codesandbox
Codespaces
commonmark

@ -284,6 +286,8 @@ redwoodjs
refactorings
Rehype
rehype
Reloadable
reloadable
renderable
REPONAME
Retrocompatibility