mirror of
https://github.com/facebook/docusaurus.git
synced 2025-07-17 00:35:50 +02:00
refactor(core): refactor routes generation logic (#7054)
* refactor(core): refactor routes generation logic * fixes
This commit is contained in:
parent
e31e91ef47
commit
77662260f8
19 changed files with 551 additions and 506 deletions
|
@@ -6,59 +6,10 @@
|
|||
*/
|
||||
|
||||
import {jest} from '@jest/globals';
|
||||
import {genChunkName, readOutputHTMLFile, generate} from '../emitUtils';
|
||||
import {readOutputHTMLFile, generate} from '../emitUtils';
|
||||
import path from 'path';
|
||||
import fs from 'fs-extra';
|
||||
|
||||
describe('genChunkName', () => {
|
||||
it('works', () => {
|
||||
const firstAssert: {[key: string]: string} = {
|
||||
'/docs/adding-blog': 'docs-adding-blog-062',
|
||||
'/docs/versioning': 'docs-versioning-8a8',
|
||||
'/': 'index',
|
||||
'/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus':
|
||||
'blog-2018-04-30-how-i-converted-profilo-to-docusaurus-4f2',
|
||||
'/youtube': 'youtube-429',
|
||||
'/users/en/': 'users-en-f7a',
|
||||
'/blog': 'blog-c06',
|
||||
};
|
||||
Object.keys(firstAssert).forEach((str) => {
|
||||
expect(genChunkName(str)).toBe(firstAssert[str]);
|
||||
});
|
||||
});
|
||||
|
||||
it("doesn't allow different chunk name for same path", () => {
|
||||
expect(genChunkName('path/is/similar', 'oldPrefix')).toEqual(
|
||||
genChunkName('path/is/similar', 'newPrefix'),
|
||||
);
|
||||
});
|
||||
|
||||
it('emits different chunk names for different paths even with same preferred name', () => {
|
||||
const secondAssert: {[key: string]: string} = {
|
||||
'/blog/1': 'blog-85-f-089',
|
||||
'/blog/2': 'blog-353-489',
|
||||
};
|
||||
Object.keys(secondAssert).forEach((str) => {
|
||||
expect(genChunkName(str, undefined, 'blog')).toBe(secondAssert[str]);
|
||||
});
|
||||
});
|
||||
|
||||
it('only generates short unique IDs', () => {
|
||||
const thirdAssert: {[key: string]: string} = {
|
||||
a: '0cc175b9',
|
||||
b: '92eb5ffe',
|
||||
c: '4a8a08f0',
|
||||
d: '8277e091',
|
||||
};
|
||||
Object.keys(thirdAssert).forEach((str) => {
|
||||
expect(genChunkName(str, undefined, undefined, true)).toBe(
|
||||
thirdAssert[str],
|
||||
);
|
||||
});
|
||||
expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091');
|
||||
});
|
||||
});
|
||||
|
||||
describe('readOutputHTMLFile', () => {
|
||||
it('trailing slash undefined', async () => {
|
||||
await expect(
|
||||
|
|
|
@@ -8,7 +8,6 @@
|
|||
import path from 'path';
|
||||
import fs from 'fs-extra';
|
||||
import {createHash} from 'crypto';
|
||||
import {simpleHash, docuHash} from './hashUtils';
|
||||
import {findAsyncSequential} from './jsUtils';
|
||||
|
||||
const fileHash = new Map<string, string>();
|
||||
|
@@ -18,7 +17,8 @@ const fileHash = new Map<string, string>();
|
|||
* differs from cache (for hot reload performance).
|
||||
*
|
||||
* @param generatedFilesDir Absolute path.
|
||||
* @param file Path relative to `generatedFilesDir`.
|
||||
* @param file Path relative to `generatedFilesDir`. File will always be
|
||||
* outputted; no need to ensure directory exists.
|
||||
* @param content String content to write.
|
||||
* @param skipCache If `true` (defaults as `true` for production), file is
|
||||
* force-rewritten, skipping cache.
|
||||
|
@@ -29,7 +29,7 @@ export async function generate(
|
|||
content: string,
|
||||
skipCache: boolean = process.env.NODE_ENV === 'production',
|
||||
): Promise<void> {
|
||||
const filepath = path.join(generatedFilesDir, file);
|
||||
const filepath = path.resolve(generatedFilesDir, file);
|
||||
|
||||
if (skipCache) {
|
||||
await fs.outputFile(filepath, content);
|
||||
|
@@ -62,35 +62,6 @@ export async function generate(
|
|||
}
|
||||
}
|
||||
|
||||
const chunkNameCache = new Map<string, string>();
|
||||
|
||||
/**
|
||||
* Generate unique chunk name given a module path.
|
||||
*/
|
||||
export function genChunkName(
|
||||
modulePath: string,
|
||||
prefix?: string,
|
||||
preferredName?: string,
|
||||
shortId: boolean = process.env.NODE_ENV === 'production',
|
||||
): string {
|
||||
let chunkName = chunkNameCache.get(modulePath);
|
||||
if (!chunkName) {
|
||||
if (shortId) {
|
||||
chunkName = simpleHash(modulePath, 8);
|
||||
} else {
|
||||
let str = modulePath;
|
||||
if (preferredName) {
|
||||
const shortHash = simpleHash(modulePath, 3);
|
||||
str = `${preferredName}${shortHash}`;
|
||||
}
|
||||
const name = str === '/' ? 'index' : docuHash(str);
|
||||
chunkName = prefix ? `${prefix}---${name}` : name;
|
||||
}
|
||||
chunkNameCache.set(modulePath, chunkName);
|
||||
}
|
||||
return chunkName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param permalink The URL that the HTML file corresponds to, without base URL
|
||||
* @param outDir Full path to the output directory
|
||||
|
|
|
@@ -22,7 +22,7 @@ export {
|
|||
DEFAULT_PLUGIN_ID,
|
||||
WEBPACK_URL_LOADER_LIMIT,
|
||||
} from './constants';
|
||||
export {generate, genChunkName, readOutputHTMLFile} from './emitUtils';
|
||||
export {generate, readOutputHTMLFile} from './emitUtils';
|
||||
export {
|
||||
getFileCommitDate,
|
||||
FileNotTrackedError,
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue