Mirror of https://github.com/facebook/docusaurus.git (synced 2025-05-15 18:17:35 +02:00)
feat(v2): code-split metadata out of routes (#1359)
* move assets-manifest to generatedFilesDir
* rename generateChunkName -> genChunkName
* implement docuHash and genComponentName
* feat(v2): code-split routes and metadata
* don't code split component code out
* simplify metadata path
* nits
* fix test
* address review
parent 866f66241b
commit f0dc68d01a
11 changed files with 249 additions and 121 deletions
@@ -1,7 +1,7 @@
 {
   "name": "@docusaurus/utils",
   "version": "2.0.0-alpha.5",
-  "description": "A set of utility functions for Docusaurus packages",
+  "description": "Node utility functions for Docusaurus packages",
   "main": "src/index.js",
   "publishConfig": {
     "access": "public"
@@ -11,6 +11,6 @@
     "escape-string-regexp": "^1.0.5",
     "front-matter": "^3.0.1",
     "fs-extra": "^7.0.0",
-    "kebab-hash": "^0.1.2"
+    "lodash": "^4.17.11"
   }
 }
@@ -8,8 +8,9 @@
 import path from 'path';
 import {
   fileToPath,
-  fileToComponentName,
-  generateChunkName,
+  docuHash,
+  genComponentName,
+  genChunkName,
   idx,
   getSubFolder,
   normalizeUrl,
@@ -30,21 +31,36 @@ describe('load utils', () => {
     });
   });
 
-  test('fileToComponentName', () => {
+  test('genComponentName', () => {
     const asserts = {
-      'index.md': 'MDIndex',
-      'hello/index.md': 'MDHelloIndex',
-      'foo.md': 'MDFoo',
-      'foo-bar.md': 'MDFooBar',
-      'index.js': 'JSIndex',
-      'foobar.js': 'JSFoobar',
-      'docusaurus/index.js': 'JSDocusaurusIndex',
-      '234.md': 'MD234',
-      '2018-07-08-test.md': 'MD20180708Test',
-      '%asd.md': 'MDAsd',
+      '/': 'Index',
+      '/foo-bar': 'FooBar096',
+      '/foo/bar': 'FooBar1Df',
+      '/blog/2017/12/14/introducing-docusaurus':
+        'Blog20171214IntroducingDocusaurus8D2',
+      '/blog/2017/12/14-introducing-docusaurus':
+        'Blog20171214IntroducingDocusaurus0Bc',
+      '/blog/201712/14-introducing-docusaurus':
+        'Blog20171214IntroducingDocusaurusA93',
     };
     Object.keys(asserts).forEach(file => {
-      expect(fileToComponentName(file)).toBe(asserts[file]);
+      expect(genComponentName(file)).toBe(asserts[file]);
     });
   });
 
+  test('docuHash', () => {
+    const asserts = {
+      '': '-d41',
+      '/': 'Index',
+      '/foo-bar': 'foo-bar-096',
+      '/foo/bar': 'foo-bar-1df',
+      '/endi/lie': 'endi-lie-9fa',
+      '/endi-lie': 'endi-lie-fd3',
+      '/yangshun/tay': 'yangshun-tay-48d',
+      '/yangshun-tay': 'yangshun-tay-f3b',
+    };
+    Object.keys(asserts).forEach(file => {
+      expect(docuHash(file)).toBe(asserts[file]);
+    });
+  });
+
@@ -64,7 +80,7 @@ describe('load utils', () => {
     });
   });
 
-  test('generateChunkName', () => {
+  test('genChunkName', () => {
     const asserts = {
       '/docs/adding-blog': 'docs-adding-blog-062',
       '/docs/versioning': 'docs-versioning-8a8',
@@ -76,7 +92,7 @@ describe('load utils', () => {
       '/blog': 'blog-c06',
     };
     Object.keys(asserts).forEach(str => {
-      expect(generateChunkName(str)).toBe(asserts[str]);
+      expect(genChunkName(str)).toBe(asserts[str]);
     });
   });
 
@@ -7,8 +7,9 @@
 
 const path = require('path');
 const fm = require('front-matter');
+const {createHash} = require('crypto');
 
-const kebabHash = require('kebab-hash');
+const _ = require(`lodash`);
 const escapeStringRegexp = require('escape-string-regexp');
 const fs = require('fs-extra');
 
@@ -39,14 +40,37 @@ function encodePath(userpath) {
     .join('/');
 }
 
-function fileToComponentName(file) {
-  const ext = extRE.exec(file)[1];
-  let str = file.replace(extRE, '');
-  str = str.replace(/([A-Z])/g, ' $1');
-  str = str.replace(/^[\W_]+|[\W_]+$/g, '').toLowerCase();
-  str = str.charAt(0).toUpperCase() + str.slice(1);
-  str = str.replace(/[\W_]+(\w|$)/g, (_, ch) => ch.toUpperCase());
-  return ext ? ext.toUpperCase() + str : str;
+/**
+ * Given an input string, convert to kebab-case and append a hash. Avoid str collision
+ * @param {string} str input string
+ * @returns {string}
+ */
+function docuHash(str) {
+  if (str === '/') {
+    return 'Index';
+  }
+  const shortHash = createHash('md5')
+    .update(str)
+    .digest('hex')
+    .substr(0, 3);
+  return `${_.kebabCase(str)}-${shortHash}`;
+}
+
+/**
+ * Generate unique React Component Name. E.g: /foo-bar -> FooBar096
+ * @param {string} pagePath
+ * @returns {string} unique react component name
+ */
+function genComponentName(pagePath) {
+  if (pagePath === '/') {
+    return 'Index';
+  }
+  const pageHash = docuHash(pagePath);
+  const pascalCase = _.flow(
+    _.camelCase,
+    _.upperFirst,
+  );
+  return pascalCase(pageHash);
 }
 
 /**
@@ -64,8 +88,8 @@ function posixPath(str) {
   return str.replace(/\\/g, '/');
 }
 
-function generateChunkName(str, prefix) {
-  const name = str === '/' ? 'index' : kebabHash(str);
+function genChunkName(str, prefix) {
+  const name = str === '/' ? 'index' : docuHash(str);
   return prefix ? `${prefix}---${name}` : name;
 }
 
@@ -160,10 +184,11 @@ function normalizeUrl(rawUrls) {
 
 module.exports = {
   encodePath,
+  docuHash,
   generate,
   fileToPath,
-  fileToComponentName,
-  generateChunkName,
+  genComponentName,
+  genChunkName,
   getSubFolder,
   idx,
   normalizeUrl,
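For illustration only (not part of the commit): a minimal, self-contained sketch of how the renamed helpers behave, mirroring the implementations above. The expected values in the comments are the ones asserted in the updated tests; the 3-character md5 suffix is what keeps kebab-cased paths that would otherwise collide (such as '/endi/lie' and '/endi-lie') distinct.

const {createHash} = require('crypto');
const _ = require('lodash');

// Same shape as the docuHash added above: kebab-case the input and append a short md5 hash.
function docuHash(str) {
  if (str === '/') {
    return 'Index';
  }
  const shortHash = createHash('md5')
    .update(str)
    .digest('hex')
    .substr(0, 3);
  return `${_.kebabCase(str)}-${shortHash}`;
}

// genComponentName pascal-cases the hashed name; genChunkName optionally prefixes it.
const genComponentName = pagePath =>
  pagePath === '/' ? 'Index' : _.upperFirst(_.camelCase(docuHash(pagePath)));

const genChunkName = (str, prefix) => {
  const name = str === '/' ? 'index' : docuHash(str);
  return prefix ? `${prefix}---${name}` : name;
};

console.log(docuHash('/endi/lie'));            // 'endi-lie-9fa'
console.log(docuHash('/endi-lie'));            // 'endi-lie-fd3' (same kebab-case, different hash)
console.log(genComponentName('/foo-bar'));     // 'FooBar096'
console.log(genChunkName('/docs/versioning')); // 'docs-versioning-8a8'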
@@ -13,7 +13,7 @@ import {Helmet} from 'react-helmet';
 import {getBundles} from 'react-loadable-ssr-addon';
 import Loadable from 'react-loadable';
 
-import manifest from '@build/assets-manifest.json'; //eslint-disable-line
+import manifest from '@generated/assets-manifest.json'; //eslint-disable-line
 import routes from '@generated/routes'; // eslint-disable-line
 import preload from './preload';
 import App from './App';
@@ -22,10 +22,10 @@ import ssrTemplate from './templates/ssr.html.template';
 // Renderer for static-site-generator-webpack-plugin (async rendering via promises)
 export default function render(locals) {
   return preload(routes, locals.path).then(() => {
-    const modules = [];
+    const modules = new Set();
     const context = {};
     const appHtml = ReactDOMServer.renderToString(
-      <Loadable.Capture report={moduleName => modules.push(moduleName)}>
+      <Loadable.Capture report={moduleName => modules.add(moduleName)}>
         <StaticRouter location={locals.path} context={context}>
           <App />
         </StaticRouter>
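An aside, not taken from the commit: Loadable.Capture calls its report callback for every loadable component that renders, so the same module name can show up more than once per page; collecting the names in a Set rather than an array presumably de-duplicates them before they are mapped to bundles. A trivial runnable sketch of the difference (the module names here are only examples):

const reported = ['@theme/BlogPage', '@theme/BlogPage', '@theme/Footer'];

// Array accumulator keeps the duplicate report.
const asArray = [];
reported.forEach(name => asArray.push(name));

// Set accumulator collapses it.
const asSet = new Set();
reported.forEach(name => asSet.add(name));

console.log(asArray.length);    // 3
console.log(Array.from(asSet)); // ['@theme/BlogPage', '@theme/Footer']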
@@ -42,8 +42,7 @@ module.exports = async function start(siteDir, cliOptions = {}) {
 
   // Reload files processing.
   if (!cliOptions.noWatch) {
-    const reload = filepath => {
-      console.log(`${filepath} has changed`);
+    const reload = () => {
       load(siteDir).catch(err => {
         console.error(chalk.red(err.stack));
       });
@@ -80,7 +79,6 @@ module.exports = async function start(siteDir, cliOptions = {}) {
   const urls = prepareUrls(protocol, host, port);
   const openUrl = normalizeUrl([urls.localUrlForBrowser, baseUrl]);
 
-  // Create compiler from generated webpack config.
   const {siteConfig, plugins = []} = props;
   let config = merge(createClientConfig(props), {
     plugins: [
@@ -132,7 +130,8 @@ module.exports = async function start(siteDir, cliOptions = {}) {
       rewrites: [{from: /\.html$/, to: '/'}],
     },
     disableHostCheck: true,
-    overlay: false,
+    // Enable overlay on browser. E.g: display errors
+    overlay: true,
     host,
     // https://webpack.js.org/configuration/dev-server/#devserverbefore
     // eslint-disable-next-line
@@ -24,7 +24,6 @@ module.exports = async function load(siteDir, cliOptions = {}) {
     constants.GENERATED_FILES_DIR_NAME,
   );
 
-  // Site Config
   const siteConfig = loadConfig(siteDir);
   await generate(
     generatedFilesDir,
@@ -32,7 +31,6 @@ module.exports = async function load(siteDir, cliOptions = {}) {
     `export default ${JSON.stringify(siteConfig, null, 2)};`,
   );
 
-  // Env
   const env = loadEnv({siteDir, siteConfig});
   await generate(
     generatedFilesDir,
@@ -52,18 +50,53 @@ module.exports = async function load(siteDir, cliOptions = {}) {
     context,
   });
 
   // Resolve outDir.
   const outDir = path.resolve(siteDir, 'build');
 
-  // Resolve theme.
-  const themePath = loadTheme(siteDir);
-
   const {baseUrl} = siteConfig;
 
-  // Generate React Router Config.
-  const {routesConfig, routesPaths} = await loadRoutes(pluginsRouteConfigs);
+  // Resolve theme. TBD (Experimental)
+  const themePath = loadTheme(siteDir);
+
+  // Routing
+  const {
+    routesAsyncModules,
+    routesConfig,
+    routesMetadata,
+    routesMetadataPath,
+    routesPaths,
+  } = await loadRoutes(pluginsRouteConfigs);
+
+  // Mapping of routePath -> metadataPath. Example: '/blog' -> '@generated/metadata/blog-c06.json'
+  // Very useful to know which json metadata file is related to certain route
+  await generate(
+    generatedFilesDir,
+    'routesMetadataPath.json',
+    JSON.stringify(routesMetadataPath, null, 2),
+  );
+
+  // Mapping of routePath -> async imported modules. Example: '/blog' -> ['@theme/BlogPage']
+  // Very useful to know what modules are async imported in a route
+  await generate(
+    generatedFilesDir,
+    'routesAsyncModules.json',
+    JSON.stringify(routesAsyncModules, null, 2),
+  );
+
+  // Write out all the metadata JSON file
+  await Promise.all(
+    routesPaths.map(async routesPath => {
+      const metadata = routesMetadata[routesPath] || {};
+      const metadataPath = routesMetadataPath[routesPath];
+      const metadataDir = path.join(generatedFilesDir, 'metadata');
+      const fileName = metadataPath.replace(/^@generated\/metadata\//, '');
+      await generate(metadataDir, fileName, JSON.stringify(metadata, null, 2));
+    }),
+  );
 
   await generate(generatedFilesDir, 'routes.js', routesConfig);
 
+  // -------------------------- TBD (Experimental) ----------------------
+  // TODO: we always assume that plugin loaded content always wanted to be imported globally
+  // TODO: contentStore API
   // Generate contents metadata.
   const metadataTemplateFile = path.resolve(
     __dirname,
@@ -88,6 +121,8 @@ module.exports = async function load(siteDir, cliOptions = {}) {
   });
   await generate(generatedFilesDir, 'metadata.js', metadataFile);
 
+  // ------------- END OF TBD -----------------------------------------
+
   const props = {
     siteConfig,
     siteDir,
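To make the two new generated files concrete, here is a hedged sketch of what they could contain for a site with a single '/blog' route, extrapolated from the examples in the comments above; the exact hash and module list depend on the site being built.

// Hypothetical <generatedFilesDir>/routesMetadataPath.json -- route to metadata chunk:
const routesMetadataPath = {
  '/blog': '@generated/metadata/blog-c06.json',
};

// Hypothetical <generatedFilesDir>/routesAsyncModules.json -- everything a route imports
// asynchronously (per loadRoutes below, this also records the component and, when the
// route has metadata, the metadata JSON chunk):
const routesAsyncModules = {
  '/blog': ['@theme/BlogPage', '@generated/metadata/blog-c06.json'],
};

// The per-route file <generatedFilesDir>/metadata/blog-c06.json then holds the metadata
// object itself, e.g. {isBlogPage: true, permalink: '/blog'}.
console.log(routesMetadataPath['/blog'], routesAsyncModules['/blog']);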
@@ -42,7 +42,7 @@ module.exports = async function loadPlugins({pluginConfigs = [], context}) {
       const content = await plugin.loadContent();
       const pluginContentPath = path.join(name, metadataFileName);
       const pluginContentDir = path.join(context.generatedFilesDir, name);
-      fs.ensureDirSync(pluginContentDir);
+      await fs.ensureDir(pluginContentDir);
       await generate(
         pluginContentDir,
         metadataFileName,
@@ -5,98 +5,159 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-const {generateChunkName} = require('@docusaurus/utils');
+const {genChunkName, docuHash} = require('@docusaurus/utils');
 const {stringify} = require('querystring');
 
 async function loadRoutes(pluginsRouteConfigs) {
-  const imports = [
+  const routesImports = [
     `import React from 'react';`,
     `import Loadable from 'react-loadable';`,
     `import Loading from '@theme/Loading';`,
     `import NotFound from '@theme/NotFound';`,
   ];
 
+  // Routes paths. Example: ['/', '/docs', '/blog/2017/09/03/test']
   const routesPaths = [];
-  const addRoutesPath = permalink => {
-    if (permalink && !/:|\*/.test(permalink)) {
-      routesPaths.push(permalink);
-    }
+  const addRoutesPath = routePath => {
+    routesPaths.push(routePath);
   };
+  // Mapping of routePath -> metadataPath. Example: '/blog' -> '@generated/metadata/blog-c06.json'
+  const routesMetadataPath = {};
+  const addRoutesMetadataPath = routePath => {
+    const fileName = `${docuHash(routePath)}.json`;
+    routesMetadataPath[routePath] = `@generated/metadata/${fileName}`;
+  };
+  // Mapping of routePath -> metadata. Example: '/blog' -> { isBlogPage: true, permalink: '/blog' }
+  const routesMetadata = {};
+  const addRoutesMetadata = (routePath, metadata) => {
+    if (metadata) {
+      routesMetadata[routePath] = metadata;
+    }
+  };
+  // Mapping of routePath -> async imported modules. Example: '/blog' -> ['@theme/BlogPage']
+  const routesAsyncModules = {};
+  const addRoutesAsyncModule = (routePath, module) => {
+    if (!routesAsyncModules[routePath]) {
+      routesAsyncModules[routePath] = [];
+    }
+    routesAsyncModules[routePath].push(module);
+  };
 
-  const notFoundRoute = `
-  {
-    path: '*',
-    component: NotFound,
-  }`;
+  // This is the higher level overview of route code generation
+  function generateRouteCode(routeConfig) {
+    const {
+      path: routePath,
+      component,
+      metadata,
+      modules = [],
+      routes,
+    } = routeConfig;
 
-  function genImportStr(target, prefix, name) {
+    addRoutesPath(routePath);
+    addRoutesMetadata(routePath, metadata);
+    addRoutesMetadataPath(routePath);
+
+    // Given an input (object or string), get the import path str
+    const getModulePath = target => {
       const isObj = typeof target === 'object';
       const importStr = isObj ? target.path : target;
       const queryStr = target.query ? `?${stringify(target.query)}` : '';
-    const chunkName = generateChunkName(name || importStr, prefix);
-    const finalStr = JSON.stringify(importStr + queryStr);
-    return `() => import(/* webpackChunkName: '${chunkName}' */ ${finalStr})`;
-  }
+      return `${importStr}${queryStr}`;
+    };
+
+    if (!component) {
+      throw new Error(`path: ${routePath} need a component`);
+    }
+    const componentPath = getModulePath(component);
+    addRoutesAsyncModule(routePath, componentPath);
+
+    const genImportStr = (modulePath, prefix, name) => {
+      const chunkName = genChunkName(name || modulePath, prefix);
+      const finalStr = JSON.stringify(modulePath);
+      return `() => import(/* webpackChunkName: '${chunkName}' */ ${finalStr})`;
+    };
 
-  function generateRouteCode(pluginRouteConfig) {
-    const {path, component, metadata, modules, routes} = pluginRouteConfig;
     if (routes) {
+      const componentStr = `Loadable({
+        loader: ${genImportStr(componentPath, 'component')},
+        loading: Loading
+      })`;
       return `
   {
-    path: '${path}',
-    component: Loadable({
-      loader: ${genImportStr(component, 'component')},
-      loading: Loading,
-    }),
+    path: '${routePath}',
+    component: ${componentStr},
     routes: [${routes.map(generateRouteCode).join(',')}],
   }`;
     }
 
-    addRoutesPath(path);
-    const genModulesImportStr = `${modules
-      .map((mod, i) => `Mod${i}: ${genImportStr(mod, i, path)},`)
-      .join('\n')}`;
-    const genModulesLoadedStr = `[${modules
-      .map((mod, i) => `loaded.Mod${i}.default,`)
-      .join('\n')}]`;
+    const modulesImportStr = modules
+      .map((module, i) => {
+        const modulePath = getModulePath(module);
+        addRoutesAsyncModule(routePath, modulePath);
+        return `Mod${i}: ${genImportStr(modulePath, i, routePath)},`;
+      })
+      .join('\n');
+    const modulesLoadedStr = modules
+      .map((module, i) => `loaded.Mod${i}.default,`)
+      .join('\n');
 
-    return `
-  {
-    path: '${path}',
-    exact: true,
-    component: Loadable.Map({
+    let metadataImportStr = '';
+    if (metadata) {
+      const metadataPath = routesMetadataPath[routePath];
+      addRoutesAsyncModule(routePath, metadataPath);
+      metadataImportStr = `metadata: ${genImportStr(
+        metadataPath,
+        'metadata',
+        routePath,
+      )},`;
+    }
+
+    const componentStr = `Loadable.Map({
       loader: {
-        ${genModulesImportStr}
-        Component: ${genImportStr(component, 'component')},
+        ${modulesImportStr}
+        ${metadataImportStr}
+        Component: ${genImportStr(componentPath, 'component')},
       },
       loading: Loading,
      render(loaded, props) {
        const Component = loaded.Component.default;
-        const modules = ${genModulesLoadedStr};
+        const metadata = loaded.metadata || {};
+        const modules = [${modulesLoadedStr}];
        return (
-          <Component {...props} metadata={${JSON.stringify(
-            metadata,
-          )}} modules={modules}/>
+          <Component {...props} metadata={metadata} modules={modules}/>
        );
      }
-    })
+    })\n`;
+
+    return `
+  {
+    path: '${routePath}',
+    exact: true,
+    component: ${componentStr}
   }`;
   }
 
   const routes = pluginsRouteConfigs.map(generateRouteCode);
+  const notFoundRoute = `
+  {
+    path: '*',
+    component: NotFound
+  }`;
 
   const routesConfig = `
-${imports.join('\n')}
+${routesImports.join('\n')}
 
-const routes = [
-// Plugins.${routes.join(',')},
+export default [
+${routes.join(',')},
+${notFoundRoute}
+];\n`;
 
-// Not Found.${notFoundRoute},
-];
-
-export default routes;\n`;
 
-  return {routesConfig, routesPaths};
+  return {
+    routesAsyncModules,
+    routesConfig,
+    routesMetadata,
+    routesMetadataPath,
+    routesPaths,
+  };
 }
 
 module.exports = loadRoutes;
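For orientation, not part of the commit: given the templates above, a docs-style route with one markdown module and metadata would come out in the generated routes.js roughly like the sketch below. Every concrete path, component name, and hash here is hypothetical; what the sketch is meant to show is the shape generateRouteCode now emits, with separate webpack chunks for each module, the metadata JSON, and the component, reassembled in render().

import React from 'react';
import Loadable from 'react-loadable';
import Loading from '@theme/Loading';
import NotFound from '@theme/NotFound';

export default [
  {
    path: '/docs/hello',
    exact: true,
    component: Loadable.Map({
      loader: {
        // one chunk per module listed for the route...
        Mod0: () => import(/* webpackChunkName: '0---docs-hello-abc' */ "@site/docs/hello.md"),
        // ...one for the route's metadata JSON...
        metadata: () => import(/* webpackChunkName: 'metadata---docs-hello-abc' */ "@generated/metadata/docs-hello-abc.json"),
        // ...and one for the component itself.
        Component: () => import(/* webpackChunkName: 'component---theme-doc-item-def' */ "@theme/DocItem"),
      },
      loading: Loading,
      render(loaded, props) {
        const Component = loaded.Component.default;
        const metadata = loaded.metadata || {};
        const modules = [loaded.Mod0.default];
        return <Component {...props} metadata={metadata} modules={modules} />;
      },
    }),
  },
  {
    path: '*',
    component: NotFound,
  },
];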
@@ -4,7 +4,6 @@
  * This source code is licensed under the MIT license found in the
  * LICENSE file in the root directory of this source tree.
  */
 
 const path = require('path');
 const WebpackNiceLog = require('webpack-nicelog');
 const ReactLoadableSSRAddon = require('react-loadable-ssr-addon');
@@ -16,6 +15,7 @@ module.exports = function createClientConfig(props) {
   const isProd = process.env.NODE_ENV === 'production';
   const config = createBaseConfig(props);
 
+  const {generatedFilesDir} = props;
   const clientConfig = merge(config, {
     entry: {
       main: path.resolve(__dirname, '../client/clientEntry.js'),
@@ -28,7 +28,7 @@ module.exports = function createClientConfig(props) {
     plugins: [
       // Generate manifests file
       new ReactLoadableSSRAddon({
-        filename: 'assets-manifest.json',
+        filename: path.resolve(generatedFilesDir, 'assets-manifest.json'),
       }),
       // Show compilation progress bar and build time.
      new WebpackNiceLog({
|
|||
expect(routesPaths.sort()).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
"/",
|
||||
"/docs",
|
||||
"/docs/endiliey/permalink",
|
||||
"/docs/foo/bar",
|
||||
"/docs/foo/baz",
|
||||
|
@ -29,6 +30,7 @@ Array [
|
|||
expect(routesPaths.sort()).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
"/",
|
||||
"/docs",
|
||||
"/docs/1.0.0/foo/bar",
|
||||
"/docs/1.0.0/foo/baz",
|
||||
"/docs/1.0.0/hello",
|
||||
|
@ -50,6 +52,7 @@ Array [
|
|||
expect(routesPaths.sort()).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
"/",
|
||||
"/docs",
|
||||
"/docs/en/1.0.0/foo/bar",
|
||||
"/docs/en/1.0.0/foo/baz",
|
||||
"/docs/en/1.0.0/hello",
|
||||
|
@ -84,6 +87,7 @@ Array [
|
|||
expect(routesPaths.sort()).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
"/",
|
||||
"/docs",
|
||||
"/docs/en/endiliey/permalink",
|
||||
"/docs/en/foo/bar",
|
||||
"/docs/en/foo/baz",
|
||||
|
|
yarn.lock
@@ -8081,13 +8081,6 @@ jsx-ast-utils@^2.0.1:
   dependencies:
     array-includes "^3.0.3"
 
-kebab-hash@^0.1.2:
-  version "0.1.2"
-  resolved "https://registry.yarnpkg.com/kebab-hash/-/kebab-hash-0.1.2.tgz#dfb7949ba34d8e70114ea7d83e266e5e2a4abaac"
-  integrity sha512-BTZpq3xgISmQmAVzkISy4eUutsUA7s4IEFlCwOBJjvSFOwyR7I+fza+tBc/rzYWK/NrmFHjfU1IhO3lu29Ib/w==
-  dependencies:
-    lodash.kebabcase "^4.1.1"
-
 keyv@3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.0.0.tgz#44923ba39e68b12a7cec7df6c3268c031f2ef373"
@@ -8417,11 +8410,6 @@ lodash.get@^4.4.2:
   resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99"
   integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=
 
-lodash.kebabcase@^4.1.1:
-  version "4.1.1"
-  resolved "https://registry.yarnpkg.com/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz#8489b1cb0d29ff88195cceca448ff6d6cc295c36"
-  integrity sha1-hImxyw0p/4gZXM7KRI/21swpXDY=
-
 lodash.map@^4.4.0:
   version "4.6.0"
   resolved "https://registry.yarnpkg.com/lodash.map/-/lodash.map-4.6.0.tgz#771ec7839e3473d9c4cde28b19394c3562f4f6d3"