mirror of
https://github.com/facebook/docusaurus.git
synced 2025-05-10 15:47:23 +02:00
refactor(utils): categorize functions into separate files (#6773)
This commit is contained in:
parent
908ad52025
commit
670f2e5268
13 changed files with 928 additions and 886 deletions
|
@ -173,7 +173,7 @@ describe('findFolderContainingFile', () => {
|
||||||
await expect(
|
await expect(
|
||||||
findFolderContainingFile(
|
findFolderContainingFile(
|
||||||
['/abcdef', '/gehij', __dirname, '/klmn'],
|
['/abcdef', '/gehij', __dirname, '/klmn'],
|
||||||
'index.test.ts',
|
'dataFileUtils.test.ts',
|
||||||
),
|
),
|
||||||
).resolves.toEqual(__dirname);
|
).resolves.toEqual(__dirname);
|
||||||
});
|
});
|
||||||
|
@ -190,16 +190,19 @@ describe('getFolderContainingFile', () => {
|
||||||
await expect(
|
await expect(
|
||||||
getFolderContainingFile(
|
getFolderContainingFile(
|
||||||
['/abcdef', '/gehij', __dirname, '/klmn'],
|
['/abcdef', '/gehij', __dirname, '/klmn'],
|
||||||
'index.test.ts',
|
'dataFileUtils.test.ts',
|
||||||
),
|
),
|
||||||
).resolves.toEqual(__dirname);
|
).resolves.toEqual(__dirname);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('throw if no folder contain such file', async () => {
|
test('throw if no folder contain such file', async () => {
|
||||||
await expect(
|
await expect(
|
||||||
getFolderContainingFile(['/abcdef', '/gehij', '/klmn'], 'index.test.ts'),
|
getFolderContainingFile(
|
||||||
|
['/abcdef', '/gehij', '/klmn'],
|
||||||
|
'dataFileUtils.test.ts',
|
||||||
|
),
|
||||||
).rejects.toThrowErrorMatchingInlineSnapshot(`
|
).rejects.toThrowErrorMatchingInlineSnapshot(`
|
||||||
"File \\"index.test.ts\\" does not exist in any of these folders:
|
"File \\"dataFileUtils.test.ts\\" does not exist in any of these folders:
|
||||||
- /abcdef
|
- /abcdef
|
||||||
- /gehij
|
- /gehij
|
||||||
- /klmn]"
|
- /klmn]"
|
||||||
|
|
154
packages/docusaurus-utils/src/__tests__/emitUtils.test.ts
Normal file
154
packages/docusaurus-utils/src/__tests__/emitUtils.test.ts
Normal file
|
@ -0,0 +1,154 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the MIT license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import {genChunkName, readOutputHTMLFile, generate} from '../emitUtils';
|
||||||
|
import path from 'path';
|
||||||
|
import fs from 'fs-extra';
|
||||||
|
|
||||||
|
test('genChunkName', () => {
|
||||||
|
const firstAssert: Record<string, string> = {
|
||||||
|
'/docs/adding-blog': 'docs-adding-blog-062',
|
||||||
|
'/docs/versioning': 'docs-versioning-8a8',
|
||||||
|
'/': 'index',
|
||||||
|
'/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus':
|
||||||
|
'blog-2018-04-30-how-i-converted-profilo-to-docusaurus-4f2',
|
||||||
|
'/youtube': 'youtube-429',
|
||||||
|
'/users/en/': 'users-en-f7a',
|
||||||
|
'/blog': 'blog-c06',
|
||||||
|
};
|
||||||
|
Object.keys(firstAssert).forEach((str) => {
|
||||||
|
expect(genChunkName(str)).toBe(firstAssert[str]);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Don't allow different chunk name for same path.
|
||||||
|
expect(genChunkName('path/is/similar', 'oldPrefix')).toEqual(
|
||||||
|
genChunkName('path/is/similar', 'newPrefix'),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Even with same preferred name, still different chunk name for
|
||||||
|
// different path
|
||||||
|
const secondAssert: Record<string, string> = {
|
||||||
|
'/blog/1': 'blog-85-f-089',
|
||||||
|
'/blog/2': 'blog-353-489',
|
||||||
|
};
|
||||||
|
Object.keys(secondAssert).forEach((str) => {
|
||||||
|
expect(genChunkName(str, undefined, 'blog')).toBe(secondAssert[str]);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Only generate short unique id
|
||||||
|
const thirdAssert: Record<string, string> = {
|
||||||
|
a: '0cc175b9',
|
||||||
|
b: '92eb5ffe',
|
||||||
|
c: '4a8a08f0',
|
||||||
|
d: '8277e091',
|
||||||
|
};
|
||||||
|
Object.keys(thirdAssert).forEach((str) => {
|
||||||
|
expect(genChunkName(str, undefined, undefined, true)).toBe(
|
||||||
|
thirdAssert[str],
|
||||||
|
);
|
||||||
|
});
|
||||||
|
expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091');
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('readOutputHTMLFile', () => {
|
||||||
|
test('trailing slash undefined', async () => {
|
||||||
|
await expect(
|
||||||
|
readOutputHTMLFile(
|
||||||
|
'/file',
|
||||||
|
path.join(__dirname, '__fixtures__/build-snap'),
|
||||||
|
undefined,
|
||||||
|
).then(String),
|
||||||
|
).resolves.toEqual('file\n');
|
||||||
|
await expect(
|
||||||
|
readOutputHTMLFile(
|
||||||
|
'/folder',
|
||||||
|
path.join(__dirname, '__fixtures__/build-snap'),
|
||||||
|
undefined,
|
||||||
|
).then(String),
|
||||||
|
).resolves.toEqual('folder\n');
|
||||||
|
await expect(
|
||||||
|
readOutputHTMLFile(
|
||||||
|
'/file/',
|
||||||
|
path.join(__dirname, '__fixtures__/build-snap'),
|
||||||
|
undefined,
|
||||||
|
).then(String),
|
||||||
|
).resolves.toEqual('file\n');
|
||||||
|
await expect(
|
||||||
|
readOutputHTMLFile(
|
||||||
|
'/folder/',
|
||||||
|
path.join(__dirname, '__fixtures__/build-snap'),
|
||||||
|
undefined,
|
||||||
|
).then(String),
|
||||||
|
).resolves.toEqual('folder\n');
|
||||||
|
});
|
||||||
|
test('trailing slash true', async () => {
|
||||||
|
await expect(
|
||||||
|
readOutputHTMLFile(
|
||||||
|
'/folder',
|
||||||
|
path.join(__dirname, '__fixtures__/build-snap'),
|
||||||
|
true,
|
||||||
|
).then(String),
|
||||||
|
).resolves.toEqual('folder\n');
|
||||||
|
await expect(
|
||||||
|
readOutputHTMLFile(
|
||||||
|
'/folder/',
|
||||||
|
path.join(__dirname, '__fixtures__/build-snap'),
|
||||||
|
true,
|
||||||
|
).then(String),
|
||||||
|
).resolves.toEqual('folder\n');
|
||||||
|
});
|
||||||
|
test('trailing slash false', async () => {
|
||||||
|
await expect(
|
||||||
|
readOutputHTMLFile(
|
||||||
|
'/file',
|
||||||
|
path.join(__dirname, '__fixtures__/build-snap'),
|
||||||
|
false,
|
||||||
|
).then(String),
|
||||||
|
).resolves.toEqual('file\n');
|
||||||
|
await expect(
|
||||||
|
readOutputHTMLFile(
|
||||||
|
'/file/',
|
||||||
|
path.join(__dirname, '__fixtures__/build-snap'),
|
||||||
|
false,
|
||||||
|
).then(String),
|
||||||
|
).resolves.toEqual('file\n');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('generate', async () => {
|
||||||
|
const writeMock = jest.spyOn(fs, 'writeFile').mockImplementation(() => {});
|
||||||
|
const existsMock = jest.spyOn(fs, 'pathExists');
|
||||||
|
const readMock = jest.spyOn(fs, 'readFile');
|
||||||
|
|
||||||
|
// First call: no file, no cache
|
||||||
|
existsMock.mockImplementationOnce(() => false);
|
||||||
|
await generate(__dirname, 'foo', 'bar');
|
||||||
|
expect(writeMock).toHaveBeenNthCalledWith(
|
||||||
|
1,
|
||||||
|
path.join(__dirname, 'foo'),
|
||||||
|
'bar',
|
||||||
|
);
|
||||||
|
|
||||||
|
// Second call: cache exists
|
||||||
|
await generate(__dirname, 'foo', 'bar');
|
||||||
|
expect(writeMock).toBeCalledTimes(1);
|
||||||
|
|
||||||
|
// Generate another: file exists, cache doesn't
|
||||||
|
existsMock.mockImplementationOnce(() => true);
|
||||||
|
// @ts-expect-error: seems the typedef doesn't understand overload
|
||||||
|
readMock.mockImplementationOnce(() => Promise.resolve('bar'));
|
||||||
|
await generate(__dirname, 'baz', 'bar');
|
||||||
|
expect(writeMock).toBeCalledTimes(1);
|
||||||
|
|
||||||
|
// Generate again: force skip cache
|
||||||
|
await generate(__dirname, 'foo', 'bar', true);
|
||||||
|
expect(writeMock).toHaveBeenNthCalledWith(
|
||||||
|
2,
|
||||||
|
path.join(__dirname, 'foo'),
|
||||||
|
'bar',
|
||||||
|
);
|
||||||
|
});
|
91
packages/docusaurus-utils/src/__tests__/i18nUtils.test.ts
Normal file
91
packages/docusaurus-utils/src/__tests__/i18nUtils.test.ts
Normal file
|
@ -0,0 +1,91 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the MIT license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import {
|
||||||
|
mergeTranslations,
|
||||||
|
updateTranslationFileMessages,
|
||||||
|
getPluginI18nPath,
|
||||||
|
} from '../i18nUtils';
|
||||||
|
|
||||||
|
test('mergeTranslations', () => {
|
||||||
|
expect(
|
||||||
|
mergeTranslations([
|
||||||
|
{
|
||||||
|
T1: {message: 'T1 message', description: 'T1 desc'},
|
||||||
|
T2: {message: 'T2 message', description: 'T2 desc'},
|
||||||
|
T3: {message: 'T3 message', description: 'T3 desc'},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
T4: {message: 'T4 message', description: 'T4 desc'},
|
||||||
|
},
|
||||||
|
{T2: {message: 'T2 message 2', description: 'T2 desc 2'}},
|
||||||
|
]),
|
||||||
|
).toEqual({
|
||||||
|
T1: {message: 'T1 message', description: 'T1 desc'},
|
||||||
|
T2: {message: 'T2 message 2', description: 'T2 desc 2'},
|
||||||
|
T3: {message: 'T3 message', description: 'T3 desc'},
|
||||||
|
T4: {message: 'T4 message', description: 'T4 desc'},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('updateTranslationFileMessages', () => {
|
||||||
|
expect(
|
||||||
|
updateTranslationFileMessages(
|
||||||
|
{
|
||||||
|
path: 'abc',
|
||||||
|
content: {
|
||||||
|
t1: {message: 't1 message', description: 't1 desc'},
|
||||||
|
t2: {message: 't2 message', description: 't2 desc'},
|
||||||
|
t3: {message: 't3 message', description: 't3 desc'},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
(message) => `prefix ${message} suffix`,
|
||||||
|
),
|
||||||
|
).toEqual({
|
||||||
|
path: 'abc',
|
||||||
|
content: {
|
||||||
|
t1: {message: 'prefix t1 message suffix', description: 't1 desc'},
|
||||||
|
t2: {message: 'prefix t2 message suffix', description: 't2 desc'},
|
||||||
|
t3: {message: 'prefix t3 message suffix', description: 't3 desc'},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('getPluginI18nPath', () => {
|
||||||
|
test('gets correct path', () => {
|
||||||
|
expect(
|
||||||
|
getPluginI18nPath({
|
||||||
|
siteDir: __dirname,
|
||||||
|
locale: 'zh-Hans',
|
||||||
|
pluginName: 'plugin-content-docs',
|
||||||
|
pluginId: 'community',
|
||||||
|
subPaths: ['foo'],
|
||||||
|
}),
|
||||||
|
).toMatchInlineSnapshot(
|
||||||
|
`"<PROJECT_ROOT>/packages/docusaurus-utils/src/__tests__/i18n/zh-Hans/plugin-content-docs-community/foo"`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
test('gets correct path for default plugin', () => {
|
||||||
|
expect(
|
||||||
|
getPluginI18nPath({
|
||||||
|
siteDir: __dirname,
|
||||||
|
locale: 'zh-Hans',
|
||||||
|
pluginName: 'plugin-content-docs',
|
||||||
|
subPaths: ['foo'],
|
||||||
|
}).replace(__dirname, ''),
|
||||||
|
).toMatchInlineSnapshot(`"/i18n/zh-Hans/plugin-content-docs/foo"`);
|
||||||
|
});
|
||||||
|
test('gets correct path when no subpaths', () => {
|
||||||
|
expect(
|
||||||
|
getPluginI18nPath({
|
||||||
|
siteDir: __dirname,
|
||||||
|
locale: 'zh-Hans',
|
||||||
|
pluginName: 'plugin-content-docs',
|
||||||
|
}).replace(__dirname, ''),
|
||||||
|
).toMatchInlineSnapshot(`"/i18n/zh-Hans/plugin-content-docs"`);
|
||||||
|
});
|
||||||
|
});
|
|
@ -1,529 +0,0 @@
|
||||||
/**
|
|
||||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the MIT license found in the
|
|
||||||
* LICENSE file in the root directory of this source tree.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import {
|
|
||||||
fileToPath,
|
|
||||||
genChunkName,
|
|
||||||
isValidPathname,
|
|
||||||
addTrailingSlash,
|
|
||||||
removeTrailingSlash,
|
|
||||||
removeSuffix,
|
|
||||||
removePrefix,
|
|
||||||
addLeadingSlash,
|
|
||||||
getElementsAround,
|
|
||||||
mergeTranslations,
|
|
||||||
mapAsyncSequential,
|
|
||||||
findAsyncSequential,
|
|
||||||
updateTranslationFileMessages,
|
|
||||||
encodePath,
|
|
||||||
addTrailingPathSeparator,
|
|
||||||
resolvePathname,
|
|
||||||
getPluginI18nPath,
|
|
||||||
generate,
|
|
||||||
reportMessage,
|
|
||||||
posixPath,
|
|
||||||
readOutputHTMLFile,
|
|
||||||
} from '../index';
|
|
||||||
import _ from 'lodash';
|
|
||||||
import fs from 'fs-extra';
|
|
||||||
import path from 'path';
|
|
||||||
|
|
||||||
describe('load utils', () => {
|
|
||||||
test('fileToPath', () => {
|
|
||||||
const asserts: Record<string, string> = {
|
|
||||||
'index.md': '/',
|
|
||||||
'hello/index.md': '/hello/',
|
|
||||||
'foo.md': '/foo',
|
|
||||||
'foo/bar.md': '/foo/bar',
|
|
||||||
'index.js': '/',
|
|
||||||
'hello/index.js': '/hello/',
|
|
||||||
'foo.js': '/foo',
|
|
||||||
'foo/bar.js': '/foo/bar',
|
|
||||||
};
|
|
||||||
Object.keys(asserts).forEach((file) => {
|
|
||||||
expect(fileToPath(file)).toBe(asserts[file]);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
test('encodePath', () => {
|
|
||||||
expect(encodePath('a/foo/')).toEqual('a/foo/');
|
|
||||||
expect(encodePath('a/<foo>/')).toEqual('a/%3Cfoo%3E/');
|
|
||||||
expect(encodePath('a/你好/')).toEqual('a/%E4%BD%A0%E5%A5%BD/');
|
|
||||||
});
|
|
||||||
|
|
||||||
test('genChunkName', () => {
|
|
||||||
const firstAssert: Record<string, string> = {
|
|
||||||
'/docs/adding-blog': 'docs-adding-blog-062',
|
|
||||||
'/docs/versioning': 'docs-versioning-8a8',
|
|
||||||
'/': 'index',
|
|
||||||
'/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus':
|
|
||||||
'blog-2018-04-30-how-i-converted-profilo-to-docusaurus-4f2',
|
|
||||||
'/youtube': 'youtube-429',
|
|
||||||
'/users/en/': 'users-en-f7a',
|
|
||||||
'/blog': 'blog-c06',
|
|
||||||
};
|
|
||||||
Object.keys(firstAssert).forEach((str) => {
|
|
||||||
expect(genChunkName(str)).toBe(firstAssert[str]);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Don't allow different chunk name for same path.
|
|
||||||
expect(genChunkName('path/is/similar', 'oldPrefix')).toEqual(
|
|
||||||
genChunkName('path/is/similar', 'newPrefix'),
|
|
||||||
);
|
|
||||||
|
|
||||||
// Even with same preferred name, still different chunk name for
|
|
||||||
// different path
|
|
||||||
const secondAssert: Record<string, string> = {
|
|
||||||
'/blog/1': 'blog-85-f-089',
|
|
||||||
'/blog/2': 'blog-353-489',
|
|
||||||
};
|
|
||||||
Object.keys(secondAssert).forEach((str) => {
|
|
||||||
expect(genChunkName(str, undefined, 'blog')).toBe(secondAssert[str]);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Only generate short unique id
|
|
||||||
const thirdAssert: Record<string, string> = {
|
|
||||||
a: '0cc175b9',
|
|
||||||
b: '92eb5ffe',
|
|
||||||
c: '4a8a08f0',
|
|
||||||
d: '8277e091',
|
|
||||||
};
|
|
||||||
Object.keys(thirdAssert).forEach((str) => {
|
|
||||||
expect(genChunkName(str, undefined, undefined, true)).toBe(
|
|
||||||
thirdAssert[str],
|
|
||||||
);
|
|
||||||
});
|
|
||||||
expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091');
|
|
||||||
});
|
|
||||||
|
|
||||||
test('addTrailingPathSeparator', () => {
|
|
||||||
expect(addTrailingPathSeparator('foo')).toEqual(
|
|
||||||
process.platform === 'win32' ? 'foo\\' : 'foo/',
|
|
||||||
);
|
|
||||||
expect(addTrailingPathSeparator('foo/')).toEqual(
|
|
||||||
process.platform === 'win32' ? 'foo\\' : 'foo/',
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('resolvePathname', () => {
|
|
||||||
// These tests are directly copied from https://github.com/mjackson/resolve-pathname/blob/master/modules/__tests__/resolvePathname-test.js
|
|
||||||
// Maybe we want to wrap that logic in the future?
|
|
||||||
expect(resolvePathname('c')).toEqual('c');
|
|
||||||
expect(resolvePathname('c', 'a/b')).toEqual('a/c');
|
|
||||||
expect(resolvePathname('/c', '/a/b')).toEqual('/c');
|
|
||||||
expect(resolvePathname('', '/a/b')).toEqual('/a/b');
|
|
||||||
expect(resolvePathname('../c', '/a/b')).toEqual('/c');
|
|
||||||
expect(resolvePathname('c', '/a/b')).toEqual('/a/c');
|
|
||||||
expect(resolvePathname('c', '/a/')).toEqual('/a/c');
|
|
||||||
expect(resolvePathname('..', '/a/b')).toEqual('/');
|
|
||||||
});
|
|
||||||
|
|
||||||
test('isValidPathname', () => {
|
|
||||||
expect(isValidPathname('/')).toBe(true);
|
|
||||||
expect(isValidPathname('/hey')).toBe(true);
|
|
||||||
expect(isValidPathname('/hey/ho')).toBe(true);
|
|
||||||
expect(isValidPathname('/hey/ho/')).toBe(true);
|
|
||||||
expect(isValidPathname('/hey/h%C3%B4/')).toBe(true);
|
|
||||||
expect(isValidPathname('/hey///ho///')).toBe(true); // Unexpected but valid
|
|
||||||
expect(isValidPathname('/hey/héllô you')).toBe(true);
|
|
||||||
|
|
||||||
expect(isValidPathname('')).toBe(false);
|
|
||||||
expect(isValidPathname('hey')).toBe(false);
|
|
||||||
expect(isValidPathname('/hey?qs=ho')).toBe(false);
|
|
||||||
expect(isValidPathname('https://fb.com/hey')).toBe(false);
|
|
||||||
expect(isValidPathname('//hey')).toBe(false);
|
|
||||||
expect(isValidPathname('////')).toBe(false);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('generate', () => {
|
|
||||||
test('behaves correctly', async () => {
|
|
||||||
const writeMock = jest.spyOn(fs, 'writeFile').mockImplementation(() => {});
|
|
||||||
const existsMock = jest.spyOn(fs, 'pathExists');
|
|
||||||
const readMock = jest.spyOn(fs, 'readFile');
|
|
||||||
|
|
||||||
// First call: no file, no cache
|
|
||||||
existsMock.mockImplementationOnce(() => false);
|
|
||||||
await generate(__dirname, 'foo', 'bar');
|
|
||||||
expect(writeMock).toHaveBeenNthCalledWith(
|
|
||||||
1,
|
|
||||||
path.join(__dirname, 'foo'),
|
|
||||||
'bar',
|
|
||||||
);
|
|
||||||
|
|
||||||
// Second call: cache exists
|
|
||||||
await generate(__dirname, 'foo', 'bar');
|
|
||||||
expect(writeMock).toBeCalledTimes(1);
|
|
||||||
|
|
||||||
// Generate another: file exists, cache doesn't
|
|
||||||
existsMock.mockImplementationOnce(() => true);
|
|
||||||
// @ts-expect-error: seems the typedef doesn't understand overload
|
|
||||||
readMock.mockImplementationOnce(() => Promise.resolve('bar'));
|
|
||||||
await generate(__dirname, 'baz', 'bar');
|
|
||||||
expect(writeMock).toBeCalledTimes(1);
|
|
||||||
|
|
||||||
// Generate again: force skip cache
|
|
||||||
await generate(__dirname, 'foo', 'bar', true);
|
|
||||||
expect(writeMock).toHaveBeenNthCalledWith(
|
|
||||||
2,
|
|
||||||
path.join(__dirname, 'foo'),
|
|
||||||
'bar',
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('addTrailingSlash', () => {
|
|
||||||
test('should no-op', () => {
|
|
||||||
expect(addTrailingSlash('/abcd/')).toEqual('/abcd/');
|
|
||||||
});
|
|
||||||
test('should add /', () => {
|
|
||||||
expect(addTrailingSlash('/abcd')).toEqual('/abcd/');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('addLeadingSlash', () => {
|
|
||||||
test('should no-op', () => {
|
|
||||||
expect(addLeadingSlash('/abc')).toEqual('/abc');
|
|
||||||
});
|
|
||||||
test('should add /', () => {
|
|
||||||
expect(addLeadingSlash('abc')).toEqual('/abc');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('removeTrailingSlash', () => {
|
|
||||||
test('should no-op', () => {
|
|
||||||
expect(removeTrailingSlash('/abcd')).toEqual('/abcd');
|
|
||||||
});
|
|
||||||
test('should remove /', () => {
|
|
||||||
expect(removeTrailingSlash('/abcd/')).toEqual('/abcd');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('removeSuffix', () => {
|
|
||||||
test('should no-op 1', () => {
|
|
||||||
expect(removeSuffix('abcdef', 'ijk')).toEqual('abcdef');
|
|
||||||
});
|
|
||||||
test('should no-op 2', () => {
|
|
||||||
expect(removeSuffix('abcdef', 'abc')).toEqual('abcdef');
|
|
||||||
});
|
|
||||||
test('should no-op 3', () => {
|
|
||||||
expect(removeSuffix('abcdef', '')).toEqual('abcdef');
|
|
||||||
});
|
|
||||||
test('should remove suffix', () => {
|
|
||||||
expect(removeSuffix('abcdef', 'ef')).toEqual('abcd');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('removePrefix', () => {
|
|
||||||
test('should no-op 1', () => {
|
|
||||||
expect(removePrefix('abcdef', 'ijk')).toEqual('abcdef');
|
|
||||||
});
|
|
||||||
test('should no-op 2', () => {
|
|
||||||
expect(removePrefix('abcdef', 'def')).toEqual('abcdef');
|
|
||||||
});
|
|
||||||
test('should no-op 3', () => {
|
|
||||||
expect(removePrefix('abcdef', '')).toEqual('abcdef');
|
|
||||||
});
|
|
||||||
test('should remove prefix', () => {
|
|
||||||
expect(removePrefix('abcdef', 'ab')).toEqual('cdef');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('getElementsAround', () => {
|
|
||||||
test('can return elements around', () => {
|
|
||||||
expect(getElementsAround(['a', 'b', 'c', 'd'], 0)).toEqual({
|
|
||||||
previous: undefined,
|
|
||||||
next: 'b',
|
|
||||||
});
|
|
||||||
expect(getElementsAround(['a', 'b', 'c', 'd'], 1)).toEqual({
|
|
||||||
previous: 'a',
|
|
||||||
next: 'c',
|
|
||||||
});
|
|
||||||
expect(getElementsAround(['a', 'b', 'c', 'd'], 2)).toEqual({
|
|
||||||
previous: 'b',
|
|
||||||
next: 'd',
|
|
||||||
});
|
|
||||||
expect(getElementsAround(['a', 'b', 'c', 'd'], 3)).toEqual({
|
|
||||||
previous: 'c',
|
|
||||||
next: undefined,
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
test('throws if bad index is provided', () => {
|
|
||||||
expect(() =>
|
|
||||||
getElementsAround(['a', 'b', 'c', 'd'], -1),
|
|
||||||
).toThrowErrorMatchingInlineSnapshot(
|
|
||||||
`"Valid \\"aroundIndex\\" for array (of size 4) are between 0 and 3, but you provided -1."`,
|
|
||||||
);
|
|
||||||
expect(() =>
|
|
||||||
getElementsAround(['a', 'b', 'c', 'd'], 4),
|
|
||||||
).toThrowErrorMatchingInlineSnapshot(
|
|
||||||
`"Valid \\"aroundIndex\\" for array (of size 4) are between 0 and 3, but you provided 4."`,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('mergeTranslations', () => {
|
|
||||||
test('should merge translations', () => {
|
|
||||||
expect(
|
|
||||||
mergeTranslations([
|
|
||||||
{
|
|
||||||
T1: {message: 'T1 message', description: 'T1 desc'},
|
|
||||||
T2: {message: 'T2 message', description: 'T2 desc'},
|
|
||||||
T3: {message: 'T3 message', description: 'T3 desc'},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
T4: {message: 'T4 message', description: 'T4 desc'},
|
|
||||||
},
|
|
||||||
{T2: {message: 'T2 message 2', description: 'T2 desc 2'}},
|
|
||||||
]),
|
|
||||||
).toEqual({
|
|
||||||
T1: {message: 'T1 message', description: 'T1 desc'},
|
|
||||||
T2: {message: 'T2 message 2', description: 'T2 desc 2'},
|
|
||||||
T3: {message: 'T3 message', description: 'T3 desc'},
|
|
||||||
T4: {message: 'T4 message', description: 'T4 desc'},
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('mapAsyncSequential', () => {
|
|
||||||
function sleep(timeout: number): Promise<void> {
|
|
||||||
return new Promise((resolve) => {
|
|
||||||
setTimeout(resolve, timeout);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
test('map sequentially', async () => {
|
|
||||||
const itemToTimeout: Record<string, number> = {
|
|
||||||
'1': 50,
|
|
||||||
'2': 150,
|
|
||||||
'3': 100,
|
|
||||||
};
|
|
||||||
const items = Object.keys(itemToTimeout);
|
|
||||||
|
|
||||||
const itemMapStartsAt: Record<string, number> = {};
|
|
||||||
const itemMapEndsAt: Record<string, number> = {};
|
|
||||||
|
|
||||||
const timeBefore = Date.now();
|
|
||||||
await expect(
|
|
||||||
mapAsyncSequential(items, async (item) => {
|
|
||||||
const itemTimeout = itemToTimeout[item];
|
|
||||||
itemMapStartsAt[item] = Date.now();
|
|
||||||
await sleep(itemTimeout);
|
|
||||||
itemMapEndsAt[item] = Date.now();
|
|
||||||
return `${item} mapped`;
|
|
||||||
}),
|
|
||||||
).resolves.toEqual(['1 mapped', '2 mapped', '3 mapped']);
|
|
||||||
const timeAfter = Date.now();
|
|
||||||
|
|
||||||
const timeTotal = timeAfter - timeBefore;
|
|
||||||
|
|
||||||
const totalTimeouts = _.sum(Object.values(itemToTimeout));
|
|
||||||
expect(timeTotal).toBeGreaterThanOrEqual(totalTimeouts - 20);
|
|
||||||
|
|
||||||
expect(itemMapStartsAt['1']).toBeGreaterThanOrEqual(0);
|
|
||||||
expect(itemMapStartsAt['2']).toBeGreaterThanOrEqual(
|
|
||||||
itemMapEndsAt['1'] - 20,
|
|
||||||
);
|
|
||||||
expect(itemMapStartsAt['3']).toBeGreaterThanOrEqual(
|
|
||||||
itemMapEndsAt['2'] - 20,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('findAsyncSequential', () => {
|
|
||||||
function sleep(timeout: number): Promise<void> {
|
|
||||||
return new Promise((resolve) => {
|
|
||||||
setTimeout(resolve, timeout);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
test('find sequentially', async () => {
|
|
||||||
const items = ['1', '2', '3'];
|
|
||||||
|
|
||||||
const findFn = jest.fn(async (item: string) => {
|
|
||||||
await sleep(50);
|
|
||||||
return item === '2';
|
|
||||||
});
|
|
||||||
|
|
||||||
const timeBefore = Date.now();
|
|
||||||
await expect(findAsyncSequential(items, findFn)).resolves.toEqual('2');
|
|
||||||
const timeAfter = Date.now();
|
|
||||||
|
|
||||||
expect(findFn).toHaveBeenCalledTimes(2);
|
|
||||||
expect(findFn).toHaveBeenNthCalledWith(1, '1');
|
|
||||||
expect(findFn).toHaveBeenNthCalledWith(2, '2');
|
|
||||||
|
|
||||||
const timeTotal = timeAfter - timeBefore;
|
|
||||||
expect(timeTotal).toBeGreaterThanOrEqual(80);
|
|
||||||
expect(timeTotal).toBeLessThan(120);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('readOutputHTMLFile', () => {
|
|
||||||
test('trailing slash undefined', async () => {
|
|
||||||
await expect(
|
|
||||||
readOutputHTMLFile(
|
|
||||||
'/file',
|
|
||||||
path.join(__dirname, '__fixtures__/build-snap'),
|
|
||||||
undefined,
|
|
||||||
).then(String),
|
|
||||||
).resolves.toEqual('file\n');
|
|
||||||
await expect(
|
|
||||||
readOutputHTMLFile(
|
|
||||||
'/folder',
|
|
||||||
path.join(__dirname, '__fixtures__/build-snap'),
|
|
||||||
undefined,
|
|
||||||
).then(String),
|
|
||||||
).resolves.toEqual('folder\n');
|
|
||||||
await expect(
|
|
||||||
readOutputHTMLFile(
|
|
||||||
'/file/',
|
|
||||||
path.join(__dirname, '__fixtures__/build-snap'),
|
|
||||||
undefined,
|
|
||||||
).then(String),
|
|
||||||
).resolves.toEqual('file\n');
|
|
||||||
await expect(
|
|
||||||
readOutputHTMLFile(
|
|
||||||
'/folder/',
|
|
||||||
path.join(__dirname, '__fixtures__/build-snap'),
|
|
||||||
undefined,
|
|
||||||
).then(String),
|
|
||||||
).resolves.toEqual('folder\n');
|
|
||||||
});
|
|
||||||
test('trailing slash true', async () => {
|
|
||||||
await expect(
|
|
||||||
readOutputHTMLFile(
|
|
||||||
'/folder',
|
|
||||||
path.join(__dirname, '__fixtures__/build-snap'),
|
|
||||||
true,
|
|
||||||
).then(String),
|
|
||||||
).resolves.toEqual('folder\n');
|
|
||||||
await expect(
|
|
||||||
readOutputHTMLFile(
|
|
||||||
'/folder/',
|
|
||||||
path.join(__dirname, '__fixtures__/build-snap'),
|
|
||||||
true,
|
|
||||||
).then(String),
|
|
||||||
).resolves.toEqual('folder\n');
|
|
||||||
});
|
|
||||||
test('trailing slash false', async () => {
|
|
||||||
await expect(
|
|
||||||
readOutputHTMLFile(
|
|
||||||
'/file',
|
|
||||||
path.join(__dirname, '__fixtures__/build-snap'),
|
|
||||||
false,
|
|
||||||
).then(String),
|
|
||||||
).resolves.toEqual('file\n');
|
|
||||||
await expect(
|
|
||||||
readOutputHTMLFile(
|
|
||||||
'/file/',
|
|
||||||
path.join(__dirname, '__fixtures__/build-snap'),
|
|
||||||
false,
|
|
||||||
).then(String),
|
|
||||||
).resolves.toEqual('file\n');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('updateTranslationFileMessages', () => {
|
|
||||||
test('should update messages', () => {
|
|
||||||
expect(
|
|
||||||
updateTranslationFileMessages(
|
|
||||||
{
|
|
||||||
path: 'abc',
|
|
||||||
content: {
|
|
||||||
t1: {message: 't1 message', description: 't1 desc'},
|
|
||||||
t2: {message: 't2 message', description: 't2 desc'},
|
|
||||||
t3: {message: 't3 message', description: 't3 desc'},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
(message) => `prefix ${message} suffix`,
|
|
||||||
),
|
|
||||||
).toEqual({
|
|
||||||
path: 'abc',
|
|
||||||
content: {
|
|
||||||
t1: {message: 'prefix t1 message suffix', description: 't1 desc'},
|
|
||||||
t2: {message: 'prefix t2 message suffix', description: 't2 desc'},
|
|
||||||
t3: {message: 'prefix t3 message suffix', description: 't3 desc'},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('getPluginI18nPath', () => {
|
|
||||||
test('gets correct path', () => {
|
|
||||||
expect(
|
|
||||||
posixPath(
|
|
||||||
getPluginI18nPath({
|
|
||||||
siteDir: __dirname,
|
|
||||||
locale: 'zh-Hans',
|
|
||||||
pluginName: 'plugin-content-docs',
|
|
||||||
pluginId: 'community',
|
|
||||||
subPaths: ['foo'],
|
|
||||||
}).replace(__dirname, ''),
|
|
||||||
),
|
|
||||||
).toEqual('/i18n/zh-Hans/plugin-content-docs-community/foo');
|
|
||||||
});
|
|
||||||
test('gets correct path for default plugin', () => {
|
|
||||||
expect(
|
|
||||||
posixPath(
|
|
||||||
getPluginI18nPath({
|
|
||||||
siteDir: __dirname,
|
|
||||||
locale: 'zh-Hans',
|
|
||||||
pluginName: 'plugin-content-docs',
|
|
||||||
subPaths: ['foo'],
|
|
||||||
}).replace(__dirname, ''),
|
|
||||||
),
|
|
||||||
).toEqual('/i18n/zh-Hans/plugin-content-docs/foo');
|
|
||||||
});
|
|
||||||
test('gets correct path when no subpaths', () => {
|
|
||||||
expect(
|
|
||||||
posixPath(
|
|
||||||
getPluginI18nPath({
|
|
||||||
siteDir: __dirname,
|
|
||||||
locale: 'zh-Hans',
|
|
||||||
pluginName: 'plugin-content-docs',
|
|
||||||
}).replace(__dirname, ''),
|
|
||||||
),
|
|
||||||
).toEqual('/i18n/zh-Hans/plugin-content-docs');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('reportMessage', () => {
|
|
||||||
test('all severities', () => {
|
|
||||||
const consoleLog = jest.spyOn(console, 'info').mockImplementation(() => {});
|
|
||||||
const consoleWarn = jest
|
|
||||||
.spyOn(console, 'warn')
|
|
||||||
.mockImplementation(() => {});
|
|
||||||
const consoleError = jest
|
|
||||||
.spyOn(console, 'error')
|
|
||||||
.mockImplementation(() => {});
|
|
||||||
reportMessage('hey', 'ignore');
|
|
||||||
reportMessage('hey', 'log');
|
|
||||||
reportMessage('hey', 'warn');
|
|
||||||
reportMessage('hey', 'error');
|
|
||||||
expect(() =>
|
|
||||||
reportMessage('hey', 'throw'),
|
|
||||||
).toThrowErrorMatchingInlineSnapshot(`"hey"`);
|
|
||||||
expect(() =>
|
|
||||||
// @ts-expect-error: for test
|
|
||||||
reportMessage('hey', 'foo'),
|
|
||||||
).toThrowErrorMatchingInlineSnapshot(
|
|
||||||
`"Unexpected \\"reportingSeverity\\" value: foo."`,
|
|
||||||
);
|
|
||||||
expect(consoleLog).toBeCalledTimes(1);
|
|
||||||
expect(consoleLog).toBeCalledWith(expect.stringMatching(/.*\[INFO].* hey/));
|
|
||||||
expect(consoleWarn).toBeCalledTimes(1);
|
|
||||||
expect(consoleWarn).toBeCalledWith(
|
|
||||||
expect.stringMatching(/.*\[WARNING].* hey/),
|
|
||||||
);
|
|
||||||
expect(consoleError).toBeCalledTimes(1);
|
|
||||||
expect(consoleError).toBeCalledWith(
|
|
||||||
expect.stringMatching(/.*\[ERROR].* hey/),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
189
packages/docusaurus-utils/src/__tests__/jsUtils.test.ts
Normal file
189
packages/docusaurus-utils/src/__tests__/jsUtils.test.ts
Normal file
|
@ -0,0 +1,189 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the MIT license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import {
|
||||||
|
removeSuffix,
|
||||||
|
removePrefix,
|
||||||
|
getElementsAround,
|
||||||
|
mapAsyncSequential,
|
||||||
|
findAsyncSequential,
|
||||||
|
reportMessage,
|
||||||
|
} from '../jsUtils';
|
||||||
|
import _ from 'lodash';
|
||||||
|
|
||||||
|
describe('removeSuffix', () => {
|
||||||
|
test('should no-op 1', () => {
|
||||||
|
expect(removeSuffix('abcdef', 'ijk')).toEqual('abcdef');
|
||||||
|
});
|
||||||
|
test('should no-op 2', () => {
|
||||||
|
expect(removeSuffix('abcdef', 'abc')).toEqual('abcdef');
|
||||||
|
});
|
||||||
|
test('should no-op 3', () => {
|
||||||
|
expect(removeSuffix('abcdef', '')).toEqual('abcdef');
|
||||||
|
});
|
||||||
|
test('should remove suffix', () => {
|
||||||
|
expect(removeSuffix('abcdef', 'ef')).toEqual('abcd');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('removePrefix', () => {
|
||||||
|
test('should no-op 1', () => {
|
||||||
|
expect(removePrefix('abcdef', 'ijk')).toEqual('abcdef');
|
||||||
|
});
|
||||||
|
test('should no-op 2', () => {
|
||||||
|
expect(removePrefix('abcdef', 'def')).toEqual('abcdef');
|
||||||
|
});
|
||||||
|
test('should no-op 3', () => {
|
||||||
|
expect(removePrefix('abcdef', '')).toEqual('abcdef');
|
||||||
|
});
|
||||||
|
test('should remove prefix', () => {
|
||||||
|
expect(removePrefix('abcdef', 'ab')).toEqual('cdef');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('getElementsAround', () => {
|
||||||
|
test('can return elements around', () => {
|
||||||
|
expect(getElementsAround(['a', 'b', 'c', 'd'], 0)).toEqual({
|
||||||
|
previous: undefined,
|
||||||
|
next: 'b',
|
||||||
|
});
|
||||||
|
expect(getElementsAround(['a', 'b', 'c', 'd'], 1)).toEqual({
|
||||||
|
previous: 'a',
|
||||||
|
next: 'c',
|
||||||
|
});
|
||||||
|
expect(getElementsAround(['a', 'b', 'c', 'd'], 2)).toEqual({
|
||||||
|
previous: 'b',
|
||||||
|
next: 'd',
|
||||||
|
});
|
||||||
|
expect(getElementsAround(['a', 'b', 'c', 'd'], 3)).toEqual({
|
||||||
|
previous: 'c',
|
||||||
|
next: undefined,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('throws if bad index is provided', () => {
|
||||||
|
expect(() =>
|
||||||
|
getElementsAround(['a', 'b', 'c', 'd'], -1),
|
||||||
|
).toThrowErrorMatchingInlineSnapshot(
|
||||||
|
`"Valid \\"aroundIndex\\" for array (of size 4) are between 0 and 3, but you provided -1."`,
|
||||||
|
);
|
||||||
|
expect(() =>
|
||||||
|
getElementsAround(['a', 'b', 'c', 'd'], 4),
|
||||||
|
).toThrowErrorMatchingInlineSnapshot(
|
||||||
|
`"Valid \\"aroundIndex\\" for array (of size 4) are between 0 and 3, but you provided 4."`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('mapAsyncSequential', () => {
|
||||||
|
function sleep(timeout: number): Promise<void> {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
setTimeout(resolve, timeout);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
test('map sequentially', async () => {
|
||||||
|
const itemToTimeout: Record<string, number> = {
|
||||||
|
'1': 50,
|
||||||
|
'2': 150,
|
||||||
|
'3': 100,
|
||||||
|
};
|
||||||
|
const items = Object.keys(itemToTimeout);
|
||||||
|
|
||||||
|
const itemMapStartsAt: Record<string, number> = {};
|
||||||
|
const itemMapEndsAt: Record<string, number> = {};
|
||||||
|
|
||||||
|
const timeBefore = Date.now();
|
||||||
|
await expect(
|
||||||
|
mapAsyncSequential(items, async (item) => {
|
||||||
|
const itemTimeout = itemToTimeout[item];
|
||||||
|
itemMapStartsAt[item] = Date.now();
|
||||||
|
await sleep(itemTimeout);
|
||||||
|
itemMapEndsAt[item] = Date.now();
|
||||||
|
return `${item} mapped`;
|
||||||
|
}),
|
||||||
|
).resolves.toEqual(['1 mapped', '2 mapped', '3 mapped']);
|
||||||
|
const timeAfter = Date.now();
|
||||||
|
|
||||||
|
const timeTotal = timeAfter - timeBefore;
|
||||||
|
|
||||||
|
const totalTimeouts = _.sum(Object.values(itemToTimeout));
|
||||||
|
expect(timeTotal).toBeGreaterThanOrEqual(totalTimeouts - 20);
|
||||||
|
|
||||||
|
expect(itemMapStartsAt['1']).toBeGreaterThanOrEqual(0);
|
||||||
|
expect(itemMapStartsAt['2']).toBeGreaterThanOrEqual(
|
||||||
|
itemMapEndsAt['1'] - 20,
|
||||||
|
);
|
||||||
|
expect(itemMapStartsAt['3']).toBeGreaterThanOrEqual(
|
||||||
|
itemMapEndsAt['2'] - 20,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('findAsyncSequential', () => {
|
||||||
|
function sleep(timeout: number): Promise<void> {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
setTimeout(resolve, timeout);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
test('find sequentially', async () => {
|
||||||
|
const items = ['1', '2', '3'];
|
||||||
|
|
||||||
|
const findFn = jest.fn(async (item: string) => {
|
||||||
|
await sleep(50);
|
||||||
|
return item === '2';
|
||||||
|
});
|
||||||
|
|
||||||
|
const timeBefore = Date.now();
|
||||||
|
await expect(findAsyncSequential(items, findFn)).resolves.toEqual('2');
|
||||||
|
const timeAfter = Date.now();
|
||||||
|
|
||||||
|
expect(findFn).toHaveBeenCalledTimes(2);
|
||||||
|
expect(findFn).toHaveBeenNthCalledWith(1, '1');
|
||||||
|
expect(findFn).toHaveBeenNthCalledWith(2, '2');
|
||||||
|
|
||||||
|
const timeTotal = timeAfter - timeBefore;
|
||||||
|
expect(timeTotal).toBeGreaterThanOrEqual(80);
|
||||||
|
expect(timeTotal).toBeLessThan(120);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('reportMessage', () => {
|
||||||
|
test('all severities', () => {
|
||||||
|
const consoleLog = jest.spyOn(console, 'info').mockImplementation(() => {});
|
||||||
|
const consoleWarn = jest
|
||||||
|
.spyOn(console, 'warn')
|
||||||
|
.mockImplementation(() => {});
|
||||||
|
const consoleError = jest
|
||||||
|
.spyOn(console, 'error')
|
||||||
|
.mockImplementation(() => {});
|
||||||
|
reportMessage('hey', 'ignore');
|
||||||
|
reportMessage('hey', 'log');
|
||||||
|
reportMessage('hey', 'warn');
|
||||||
|
reportMessage('hey', 'error');
|
||||||
|
expect(() =>
|
||||||
|
reportMessage('hey', 'throw'),
|
||||||
|
).toThrowErrorMatchingInlineSnapshot(`"hey"`);
|
||||||
|
expect(() =>
|
||||||
|
// @ts-expect-error: for test
|
||||||
|
reportMessage('hey', 'foo'),
|
||||||
|
).toThrowErrorMatchingInlineSnapshot(
|
||||||
|
`"Unexpected \\"reportingSeverity\\" value: foo."`,
|
||||||
|
);
|
||||||
|
expect(consoleLog).toBeCalledTimes(1);
|
||||||
|
expect(consoleLog).toBeCalledWith(expect.stringMatching(/.*\[INFO].* hey/));
|
||||||
|
expect(consoleWarn).toBeCalledTimes(1);
|
||||||
|
expect(consoleWarn).toBeCalledWith(
|
||||||
|
expect.stringMatching(/.*\[WARNING].* hey/),
|
||||||
|
);
|
||||||
|
expect(consoleError).toBeCalledTimes(1);
|
||||||
|
expect(consoleError).toBeCalledWith(
|
||||||
|
expect.stringMatching(/.*\[ERROR].* hey/),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
|
@ -12,6 +12,7 @@ import {
|
||||||
posixPath,
|
posixPath,
|
||||||
aliasedSitePath,
|
aliasedSitePath,
|
||||||
toMessageRelativeFilePath,
|
toMessageRelativeFilePath,
|
||||||
|
addTrailingPathSeparator,
|
||||||
} from '../pathUtils';
|
} from '../pathUtils';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
|
|
||||||
|
@ -116,63 +117,63 @@ describe('shortName', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('toMessageRelativeFilePath', () => {
|
test('toMessageRelativeFilePath', () => {
|
||||||
test('behaves correctly', () => {
|
jest
|
||||||
jest
|
.spyOn(process, 'cwd')
|
||||||
.spyOn(process, 'cwd')
|
.mockImplementationOnce(() => path.join(__dirname, '..'));
|
||||||
.mockImplementationOnce(() => path.join(__dirname, '..'));
|
expect(toMessageRelativeFilePath(path.join(__dirname, 'foo/bar.js'))).toEqual(
|
||||||
expect(
|
'__tests__/foo/bar.js',
|
||||||
toMessageRelativeFilePath(path.join(__dirname, 'foo/bar.js')),
|
);
|
||||||
).toEqual('__tests__/foo/bar.js');
|
});
|
||||||
|
|
||||||
|
test('escapePath', () => {
|
||||||
|
const asserts: Record<string, string> = {
|
||||||
|
'c:/aaaa\\bbbb': 'c:/aaaa\\\\bbbb',
|
||||||
|
'c:\\aaaa\\bbbb\\★': 'c:\\\\aaaa\\\\bbbb\\\\★',
|
||||||
|
'\\\\?\\c:\\aaaa\\bbbb': '\\\\\\\\?\\\\c:\\\\aaaa\\\\bbbb',
|
||||||
|
'c:\\aaaa\\bbbb': 'c:\\\\aaaa\\\\bbbb',
|
||||||
|
'foo\\bar': 'foo\\\\bar',
|
||||||
|
'foo\\bar/lol': 'foo\\\\bar/lol',
|
||||||
|
'website\\docs/**/*.{md,mdx}': 'website\\\\docs/**/*.{md,mdx}',
|
||||||
|
};
|
||||||
|
Object.keys(asserts).forEach((file) => {
|
||||||
|
expect(escapePath(file)).toBe(asserts[file]);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('escapePath', () => {
|
test('posixPath', () => {
|
||||||
test('escapePath works', () => {
|
const asserts: Record<string, string> = {
|
||||||
const asserts: Record<string, string> = {
|
'c:/aaaa\\bbbb': 'c:/aaaa/bbbb',
|
||||||
'c:/aaaa\\bbbb': 'c:/aaaa\\\\bbbb',
|
'c:\\aaaa\\bbbb\\★': 'c:\\aaaa\\bbbb\\★',
|
||||||
'c:\\aaaa\\bbbb\\★': 'c:\\\\aaaa\\\\bbbb\\\\★',
|
'\\\\?\\c:\\aaaa\\bbbb': '\\\\?\\c:\\aaaa\\bbbb',
|
||||||
'\\\\?\\c:\\aaaa\\bbbb': '\\\\\\\\?\\\\c:\\\\aaaa\\\\bbbb',
|
'c:\\aaaa\\bbbb': 'c:/aaaa/bbbb',
|
||||||
'c:\\aaaa\\bbbb': 'c:\\\\aaaa\\\\bbbb',
|
'foo\\bar': 'foo/bar',
|
||||||
'foo\\bar': 'foo\\\\bar',
|
'foo\\bar/lol': 'foo/bar/lol',
|
||||||
'foo\\bar/lol': 'foo\\\\bar/lol',
|
'website\\docs/**/*.{md,mdx}': 'website/docs/**/*.{md,mdx}',
|
||||||
'website\\docs/**/*.{md,mdx}': 'website\\\\docs/**/*.{md,mdx}',
|
};
|
||||||
};
|
Object.keys(asserts).forEach((file) => {
|
||||||
Object.keys(asserts).forEach((file) => {
|
expect(posixPath(file)).toBe(asserts[file]);
|
||||||
expect(escapePath(file)).toBe(asserts[file]);
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('posixPath', () => {
|
test('aliasedSitePath', () => {
|
||||||
test('posixPath works', () => {
|
const asserts: Record<string, string> = {
|
||||||
const asserts: Record<string, string> = {
|
'user/website/docs/asd.md': '@site/docs/asd.md',
|
||||||
'c:/aaaa\\bbbb': 'c:/aaaa/bbbb',
|
'user/website/versioned_docs/foo/bar.md': '@site/versioned_docs/foo/bar.md',
|
||||||
'c:\\aaaa\\bbbb\\★': 'c:\\aaaa\\bbbb\\★',
|
'user/docs/test.md': '@site/../docs/test.md',
|
||||||
'\\\\?\\c:\\aaaa\\bbbb': '\\\\?\\c:\\aaaa\\bbbb',
|
};
|
||||||
'c:\\aaaa\\bbbb': 'c:/aaaa/bbbb',
|
Object.keys(asserts).forEach((file) => {
|
||||||
'foo\\bar': 'foo/bar',
|
expect(posixPath(aliasedSitePath(file, 'user/website'))).toBe(
|
||||||
'foo\\bar/lol': 'foo/bar/lol',
|
asserts[file],
|
||||||
'website\\docs/**/*.{md,mdx}': 'website/docs/**/*.{md,mdx}',
|
);
|
||||||
};
|
|
||||||
Object.keys(asserts).forEach((file) => {
|
|
||||||
expect(posixPath(file)).toBe(asserts[file]);
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('aliasedSitePath', () => {
|
test('addTrailingPathSeparator', () => {
|
||||||
test('behaves correctly', () => {
|
expect(addTrailingPathSeparator('foo')).toEqual(
|
||||||
const asserts: Record<string, string> = {
|
process.platform === 'win32' ? 'foo\\' : 'foo/',
|
||||||
'user/website/docs/asd.md': '@site/docs/asd.md',
|
);
|
||||||
'user/website/versioned_docs/foo/bar.md':
|
expect(addTrailingPathSeparator('foo/')).toEqual(
|
||||||
'@site/versioned_docs/foo/bar.md',
|
process.platform === 'win32' ? 'foo\\' : 'foo/',
|
||||||
'user/docs/test.md': '@site/../docs/test.md',
|
);
|
||||||
};
|
|
||||||
Object.keys(asserts).forEach((file) => {
|
|
||||||
expect(posixPath(aliasedSitePath(file, 'user/website'))).toBe(
|
|
||||||
asserts[file],
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
|
@ -5,7 +5,17 @@
|
||||||
* LICENSE file in the root directory of this source tree.
|
* LICENSE file in the root directory of this source tree.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import {normalizeUrl, getEditUrl} from '../urlUtils';
|
import {
|
||||||
|
normalizeUrl,
|
||||||
|
getEditUrl,
|
||||||
|
fileToPath,
|
||||||
|
isValidPathname,
|
||||||
|
addTrailingSlash,
|
||||||
|
addLeadingSlash,
|
||||||
|
removeTrailingSlash,
|
||||||
|
resolvePathname,
|
||||||
|
encodePath,
|
||||||
|
} from '../urlUtils';
|
||||||
|
|
||||||
describe('normalizeUrl', () => {
|
describe('normalizeUrl', () => {
|
||||||
test('should normalize urls correctly', () => {
|
test('should normalize urls correctly', () => {
|
||||||
|
@ -150,3 +160,82 @@ describe('getEditUrl', () => {
|
||||||
expect(getEditUrl('foo/bar.md')).toBeUndefined();
|
expect(getEditUrl('foo/bar.md')).toBeUndefined();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('fileToPath', () => {
|
||||||
|
const asserts: Record<string, string> = {
|
||||||
|
'index.md': '/',
|
||||||
|
'hello/index.md': '/hello/',
|
||||||
|
'foo.md': '/foo',
|
||||||
|
'foo/bar.md': '/foo/bar',
|
||||||
|
'index.js': '/',
|
||||||
|
'hello/index.js': '/hello/',
|
||||||
|
'foo.js': '/foo',
|
||||||
|
'foo/bar.js': '/foo/bar',
|
||||||
|
};
|
||||||
|
Object.keys(asserts).forEach((file) => {
|
||||||
|
expect(fileToPath(file)).toBe(asserts[file]);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('isValidPathname', () => {
|
||||||
|
expect(isValidPathname('/')).toBe(true);
|
||||||
|
expect(isValidPathname('/hey')).toBe(true);
|
||||||
|
expect(isValidPathname('/hey/ho')).toBe(true);
|
||||||
|
expect(isValidPathname('/hey/ho/')).toBe(true);
|
||||||
|
expect(isValidPathname('/hey/h%C3%B4/')).toBe(true);
|
||||||
|
expect(isValidPathname('/hey///ho///')).toBe(true); // Unexpected but valid
|
||||||
|
expect(isValidPathname('/hey/héllô you')).toBe(true);
|
||||||
|
|
||||||
|
expect(isValidPathname('')).toBe(false);
|
||||||
|
expect(isValidPathname('hey')).toBe(false);
|
||||||
|
expect(isValidPathname('/hey?qs=ho')).toBe(false);
|
||||||
|
expect(isValidPathname('https://fb.com/hey')).toBe(false);
|
||||||
|
expect(isValidPathname('//hey')).toBe(false);
|
||||||
|
expect(isValidPathname('////')).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('addTrailingSlash', () => {
|
||||||
|
test('should no-op', () => {
|
||||||
|
expect(addTrailingSlash('/abcd/')).toEqual('/abcd/');
|
||||||
|
});
|
||||||
|
test('should add /', () => {
|
||||||
|
expect(addTrailingSlash('/abcd')).toEqual('/abcd/');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('addLeadingSlash', () => {
|
||||||
|
test('should no-op', () => {
|
||||||
|
expect(addLeadingSlash('/abc')).toEqual('/abc');
|
||||||
|
});
|
||||||
|
test('should add /', () => {
|
||||||
|
expect(addLeadingSlash('abc')).toEqual('/abc');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('removeTrailingSlash', () => {
|
||||||
|
test('should no-op', () => {
|
||||||
|
expect(removeTrailingSlash('/abcd')).toEqual('/abcd');
|
||||||
|
});
|
||||||
|
test('should remove /', () => {
|
||||||
|
expect(removeTrailingSlash('/abcd/')).toEqual('/abcd');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('resolvePathname', () => {
|
||||||
|
// These tests are directly copied from https://github.com/mjackson/resolve-pathname/blob/master/modules/__tests__/resolvePathname-test.js
|
||||||
|
// Maybe we want to wrap that logic in the future?
|
||||||
|
expect(resolvePathname('c')).toEqual('c');
|
||||||
|
expect(resolvePathname('c', 'a/b')).toEqual('a/c');
|
||||||
|
expect(resolvePathname('/c', '/a/b')).toEqual('/c');
|
||||||
|
expect(resolvePathname('', '/a/b')).toEqual('/a/b');
|
||||||
|
expect(resolvePathname('../c', '/a/b')).toEqual('/c');
|
||||||
|
expect(resolvePathname('c', '/a/b')).toEqual('/a/c');
|
||||||
|
expect(resolvePathname('c', '/a/')).toEqual('/a/c');
|
||||||
|
expect(resolvePathname('..', '/a/b')).toEqual('/');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('encodePath', () => {
|
||||||
|
expect(encodePath('a/foo/')).toEqual('a/foo/');
|
||||||
|
expect(encodePath('a/<foo>/')).toEqual('a/%3Cfoo%3E/');
|
||||||
|
expect(encodePath('a/你好/')).toEqual('a/%E4%BD%A0%E5%A5%BD/');
|
||||||
|
});
|
||||||
|
|
113
packages/docusaurus-utils/src/emitUtils.ts
Normal file
113
packages/docusaurus-utils/src/emitUtils.ts
Normal file
|
@ -0,0 +1,113 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the MIT license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import path from 'path';
|
||||||
|
import fs from 'fs-extra';
|
||||||
|
import {createHash} from 'crypto';
|
||||||
|
import {simpleHash, docuHash} from './hashUtils';
|
||||||
|
import {findAsyncSequential} from './jsUtils';
|
||||||
|
|
||||||
|
const fileHash = new Map<string, string>();
|
||||||
|
|
||||||
|
export async function generate(
|
||||||
|
generatedFilesDir: string,
|
||||||
|
file: string,
|
||||||
|
content: string,
|
||||||
|
skipCache: boolean = process.env.NODE_ENV === 'production',
|
||||||
|
): Promise<void> {
|
||||||
|
const filepath = path.join(generatedFilesDir, file);
|
||||||
|
|
||||||
|
if (skipCache) {
|
||||||
|
await fs.ensureDir(path.dirname(filepath));
|
||||||
|
await fs.writeFile(filepath, content);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let lastHash = fileHash.get(filepath);
|
||||||
|
|
||||||
|
// If file already exists but its not in runtime cache yet,
|
||||||
|
// we try to calculate the content hash and then compare
|
||||||
|
// This is to avoid unnecessary overwriting and we can reuse old file.
|
||||||
|
if (!lastHash && (await fs.pathExists(filepath))) {
|
||||||
|
const lastContent = await fs.readFile(filepath, 'utf8');
|
||||||
|
lastHash = createHash('md5').update(lastContent).digest('hex');
|
||||||
|
fileHash.set(filepath, lastHash);
|
||||||
|
}
|
||||||
|
|
||||||
|
const currentHash = createHash('md5').update(content).digest('hex');
|
||||||
|
|
||||||
|
if (lastHash !== currentHash) {
|
||||||
|
await fs.ensureDir(path.dirname(filepath));
|
||||||
|
await fs.writeFile(filepath, content);
|
||||||
|
fileHash.set(filepath, currentHash);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const chunkNameCache = new Map();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate unique chunk name given a module path.
|
||||||
|
*/
|
||||||
|
export function genChunkName(
|
||||||
|
modulePath: string,
|
||||||
|
prefix?: string,
|
||||||
|
preferredName?: string,
|
||||||
|
shortId: boolean = process.env.NODE_ENV === 'production',
|
||||||
|
): string {
|
||||||
|
let chunkName: string | undefined = chunkNameCache.get(modulePath);
|
||||||
|
if (!chunkName) {
|
||||||
|
if (shortId) {
|
||||||
|
chunkName = simpleHash(modulePath, 8);
|
||||||
|
} else {
|
||||||
|
let str = modulePath;
|
||||||
|
if (preferredName) {
|
||||||
|
const shortHash = simpleHash(modulePath, 3);
|
||||||
|
str = `${preferredName}${shortHash}`;
|
||||||
|
}
|
||||||
|
const name = str === '/' ? 'index' : docuHash(str);
|
||||||
|
chunkName = prefix ? `${prefix}---${name}` : name;
|
||||||
|
}
|
||||||
|
chunkNameCache.set(modulePath, chunkName);
|
||||||
|
}
|
||||||
|
return chunkName;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param permalink The URL that the HTML file corresponds to, without base URL
|
||||||
|
* @param outDir Full path to the output directory
|
||||||
|
* @param trailingSlash The site config option. If provided, only one path will
|
||||||
|
* be read.
|
||||||
|
* @returns This returns a buffer, which you have to decode string yourself if
|
||||||
|
* needed. (Not always necessary since the output isn't for human consumption
|
||||||
|
* anyways, and most HTML manipulation libs accept buffers)
|
||||||
|
*/
|
||||||
|
export async function readOutputHTMLFile(
|
||||||
|
permalink: string,
|
||||||
|
outDir: string,
|
||||||
|
trailingSlash: boolean | undefined,
|
||||||
|
): Promise<Buffer> {
|
||||||
|
const withTrailingSlashPath = path.join(outDir, permalink, 'index.html');
|
||||||
|
const withoutTrailingSlashPath = path.join(
|
||||||
|
outDir,
|
||||||
|
`${permalink.replace(/\/$/, '')}.html`,
|
||||||
|
);
|
||||||
|
if (trailingSlash) {
|
||||||
|
return fs.readFile(withTrailingSlashPath);
|
||||||
|
} else if (trailingSlash === false) {
|
||||||
|
return fs.readFile(withoutTrailingSlashPath);
|
||||||
|
}
|
||||||
|
const HTMLPath = await findAsyncSequential(
|
||||||
|
[withTrailingSlashPath, withoutTrailingSlashPath],
|
||||||
|
fs.pathExists,
|
||||||
|
);
|
||||||
|
if (!HTMLPath) {
|
||||||
|
throw new Error(
|
||||||
|
`Expected output HTML file to be found at ${withTrailingSlashPath}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return fs.readFile(HTMLPath);
|
||||||
|
}
|
58
packages/docusaurus-utils/src/i18nUtils.ts
Normal file
58
packages/docusaurus-utils/src/i18nUtils.ts
Normal file
|
@ -0,0 +1,58 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the MIT license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import path from 'path';
|
||||||
|
import _ from 'lodash';
|
||||||
|
import type {TranslationFileContent, TranslationFile} from '@docusaurus/types';
|
||||||
|
import {DEFAULT_PLUGIN_ID} from './constants';
|
||||||
|
|
||||||
|
export function mergeTranslations(
|
||||||
|
contents: TranslationFileContent[],
|
||||||
|
): TranslationFileContent {
|
||||||
|
return contents.reduce((acc, content) => ({...acc, ...content}), {});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Useful to update all the messages of a translation file
|
||||||
|
// Used in tests to simulate translations
|
||||||
|
export function updateTranslationFileMessages(
|
||||||
|
translationFile: TranslationFile,
|
||||||
|
updateMessage: (message: string) => string,
|
||||||
|
): TranslationFile {
|
||||||
|
return {
|
||||||
|
...translationFile,
|
||||||
|
content: _.mapValues(translationFile.content, (translation) => ({
|
||||||
|
...translation,
|
||||||
|
message: updateMessage(translation.message),
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getPluginI18nPath({
|
||||||
|
siteDir,
|
||||||
|
locale,
|
||||||
|
pluginName,
|
||||||
|
pluginId = DEFAULT_PLUGIN_ID,
|
||||||
|
subPaths = [],
|
||||||
|
}: {
|
||||||
|
siteDir: string;
|
||||||
|
locale: string;
|
||||||
|
pluginName: string;
|
||||||
|
pluginId?: string | undefined;
|
||||||
|
subPaths?: string[];
|
||||||
|
}): string {
|
||||||
|
return path.join(
|
||||||
|
siteDir,
|
||||||
|
'i18n',
|
||||||
|
// namespace first by locale: convenient to work in a single folder for a
|
||||||
|
// translator
|
||||||
|
locale,
|
||||||
|
// Make it convenient to use for single-instance
|
||||||
|
// ie: return "docs", not "docs-default" nor "docs/default"
|
||||||
|
`${pluginName}${pluginId === DEFAULT_PLUGIN_ID ? '' : `-${pluginId}`}`,
|
||||||
|
...subPaths,
|
||||||
|
);
|
||||||
|
}
|
|
@ -5,23 +5,6 @@
|
||||||
* LICENSE file in the root directory of this source tree.
|
* LICENSE file in the root directory of this source tree.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import logger from '@docusaurus/logger';
|
|
||||||
import path from 'path';
|
|
||||||
import {createHash} from 'crypto';
|
|
||||||
import _ from 'lodash';
|
|
||||||
import fs from 'fs-extra';
|
|
||||||
import {URL} from 'url';
|
|
||||||
import type {
|
|
||||||
ReportingSeverity,
|
|
||||||
TranslationFileContent,
|
|
||||||
TranslationFile,
|
|
||||||
} from '@docusaurus/types';
|
|
||||||
|
|
||||||
import resolvePathnameUnsafe from 'resolve-pathname';
|
|
||||||
|
|
||||||
import {simpleHash, docuHash} from './hashUtils';
|
|
||||||
import {DEFAULT_PLUGIN_ID} from './constants';
|
|
||||||
|
|
||||||
export {
|
export {
|
||||||
NODE_MAJOR_VERSION,
|
NODE_MAJOR_VERSION,
|
||||||
NODE_MINOR_VERSION,
|
NODE_MINOR_VERSION,
|
||||||
|
@ -37,8 +20,32 @@ export {
|
||||||
DEFAULT_PLUGIN_ID,
|
DEFAULT_PLUGIN_ID,
|
||||||
WEBPACK_URL_LOADER_LIMIT,
|
WEBPACK_URL_LOADER_LIMIT,
|
||||||
} from './constants';
|
} from './constants';
|
||||||
|
export {generate, genChunkName, readOutputHTMLFile} from './emitUtils';
|
||||||
export {getFileCommitDate, GitNotFoundError} from './gitUtils';
|
export {getFileCommitDate, GitNotFoundError} from './gitUtils';
|
||||||
export {normalizeUrl, getEditUrl} from './urlUtils';
|
export {
|
||||||
|
mergeTranslations,
|
||||||
|
updateTranslationFileMessages,
|
||||||
|
getPluginI18nPath,
|
||||||
|
} from './i18nUtils';
|
||||||
|
export {
|
||||||
|
removeSuffix,
|
||||||
|
removePrefix,
|
||||||
|
getElementsAround,
|
||||||
|
mapAsyncSequential,
|
||||||
|
findAsyncSequential,
|
||||||
|
reportMessage,
|
||||||
|
} from './jsUtils';
|
||||||
|
export {
|
||||||
|
normalizeUrl,
|
||||||
|
getEditUrl,
|
||||||
|
fileToPath,
|
||||||
|
encodePath,
|
||||||
|
isValidPathname,
|
||||||
|
resolvePathname,
|
||||||
|
addLeadingSlash,
|
||||||
|
addTrailingSlash,
|
||||||
|
removeTrailingSlash,
|
||||||
|
} from './urlUtils';
|
||||||
export {
|
export {
|
||||||
type Tag,
|
type Tag,
|
||||||
type FrontMatterTag,
|
type FrontMatterTag,
|
||||||
|
@ -69,6 +76,7 @@ export {
|
||||||
toMessageRelativeFilePath,
|
toMessageRelativeFilePath,
|
||||||
aliasedSitePath,
|
aliasedSitePath,
|
||||||
escapePath,
|
escapePath,
|
||||||
|
addTrailingPathSeparator,
|
||||||
} from './pathUtils';
|
} from './pathUtils';
|
||||||
export {md5Hash, simpleHash, docuHash} from './hashUtils';
|
export {md5Hash, simpleHash, docuHash} from './hashUtils';
|
||||||
export {
|
export {
|
||||||
|
@ -85,285 +93,3 @@ export {
|
||||||
findFolderContainingFile,
|
findFolderContainingFile,
|
||||||
getFolderContainingFile,
|
getFolderContainingFile,
|
||||||
} from './dataFileUtils';
|
} from './dataFileUtils';
|
||||||
|
|
||||||
const fileHash = new Map<string, string>();
|
|
||||||
export async function generate(
|
|
||||||
generatedFilesDir: string,
|
|
||||||
file: string,
|
|
||||||
content: string,
|
|
||||||
skipCache: boolean = process.env.NODE_ENV === 'production',
|
|
||||||
): Promise<void> {
|
|
||||||
const filepath = path.join(generatedFilesDir, file);
|
|
||||||
|
|
||||||
if (skipCache) {
|
|
||||||
await fs.ensureDir(path.dirname(filepath));
|
|
||||||
await fs.writeFile(filepath, content);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
let lastHash = fileHash.get(filepath);
|
|
||||||
|
|
||||||
// If file already exists but its not in runtime cache yet,
|
|
||||||
// we try to calculate the content hash and then compare
|
|
||||||
// This is to avoid unnecessary overwriting and we can reuse old file.
|
|
||||||
if (!lastHash && (await fs.pathExists(filepath))) {
|
|
||||||
const lastContent = await fs.readFile(filepath, 'utf8');
|
|
||||||
lastHash = createHash('md5').update(lastContent).digest('hex');
|
|
||||||
fileHash.set(filepath, lastHash);
|
|
||||||
}
|
|
||||||
|
|
||||||
const currentHash = createHash('md5').update(content).digest('hex');
|
|
||||||
|
|
||||||
if (lastHash !== currentHash) {
|
|
||||||
await fs.ensureDir(path.dirname(filepath));
|
|
||||||
await fs.writeFile(filepath, content);
|
|
||||||
fileHash.set(filepath, currentHash);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const indexRE = /(?<dirname>^|.*\/)index\.(?:mdx?|jsx?|tsx?)$/i;
|
|
||||||
const extRE = /\.(?:mdx?|jsx?|tsx?)$/;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Convert filepath to url path.
|
|
||||||
* Example: 'index.md' -> '/', 'foo/bar.js' -> '/foo/bar',
|
|
||||||
*/
|
|
||||||
export function fileToPath(file: string): string {
|
|
||||||
if (indexRE.test(file)) {
|
|
||||||
return file.replace(indexRE, '/$1');
|
|
||||||
}
|
|
||||||
return `/${file.replace(extRE, '').replace(/\\/g, '/')}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function encodePath(userPath: string): string {
|
|
||||||
return userPath
|
|
||||||
.split('/')
|
|
||||||
.map((item) => encodeURIComponent(item))
|
|
||||||
.join('/');
|
|
||||||
}
|
|
||||||
|
|
||||||
const chunkNameCache = new Map();
|
|
||||||
/**
|
|
||||||
* Generate unique chunk name given a module path.
|
|
||||||
*/
|
|
||||||
export function genChunkName(
|
|
||||||
modulePath: string,
|
|
||||||
prefix?: string,
|
|
||||||
preferredName?: string,
|
|
||||||
shortId: boolean = process.env.NODE_ENV === 'production',
|
|
||||||
): string {
|
|
||||||
let chunkName: string | undefined = chunkNameCache.get(modulePath);
|
|
||||||
if (!chunkName) {
|
|
||||||
if (shortId) {
|
|
||||||
chunkName = simpleHash(modulePath, 8);
|
|
||||||
} else {
|
|
||||||
let str = modulePath;
|
|
||||||
if (preferredName) {
|
|
||||||
const shortHash = simpleHash(modulePath, 3);
|
|
||||||
str = `${preferredName}${shortHash}`;
|
|
||||||
}
|
|
||||||
const name = str === '/' ? 'index' : docuHash(str);
|
|
||||||
chunkName = prefix ? `${prefix}---${name}` : name;
|
|
||||||
}
|
|
||||||
chunkNameCache.set(modulePath, chunkName);
|
|
||||||
}
|
|
||||||
return chunkName;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function isValidPathname(str: string): boolean {
|
|
||||||
if (!str.startsWith('/')) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
// weird, but is there a better way?
|
|
||||||
const parsedPathname = new URL(str, 'https://domain.com').pathname;
|
|
||||||
return parsedPathname === str || parsedPathname === encodeURI(str);
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// resolve pathname and fail fast if resolution fails
|
|
||||||
export function resolvePathname(to: string, from?: string): string {
|
|
||||||
return resolvePathnameUnsafe(to, from);
|
|
||||||
}
|
|
||||||
export function addLeadingSlash(str: string): string {
|
|
||||||
return str.startsWith('/') ? str : `/${str}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function addTrailingPathSeparator(str: string): string {
|
|
||||||
return str.endsWith(path.sep)
|
|
||||||
? str
|
|
||||||
: // If this is Windows, we need to change the forward slash to backward
|
|
||||||
`${str.replace(/\/$/, '')}${path.sep}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO deduplicate: also present in @docusaurus/utils-common
|
|
||||||
export function addTrailingSlash(str: string): string {
|
|
||||||
return str.endsWith('/') ? str : `${str}/`;
|
|
||||||
}
|
|
||||||
export function removeTrailingSlash(str: string): string {
|
|
||||||
return removeSuffix(str, '/');
|
|
||||||
}
|
|
||||||
|
|
||||||
export function removeSuffix(str: string, suffix: string): string {
|
|
||||||
if (suffix === '') {
|
|
||||||
return str; // always returns "" otherwise!
|
|
||||||
}
|
|
||||||
return str.endsWith(suffix) ? str.slice(0, -suffix.length) : str;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function removePrefix(str: string, prefix: string): string {
|
|
||||||
return str.startsWith(prefix) ? str.slice(prefix.length) : str;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getElementsAround<T>(
|
|
||||||
array: T[],
|
|
||||||
aroundIndex: number,
|
|
||||||
): {
|
|
||||||
next: T | undefined;
|
|
||||||
previous: T | undefined;
|
|
||||||
} {
|
|
||||||
const min = 0;
|
|
||||||
const max = array.length - 1;
|
|
||||||
if (aroundIndex < min || aroundIndex > max) {
|
|
||||||
throw new Error(
|
|
||||||
`Valid "aroundIndex" for array (of size ${array.length}) are between ${min} and ${max}, but you provided ${aroundIndex}.`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
const previous = aroundIndex === min ? undefined : array[aroundIndex - 1];
|
|
||||||
const next = aroundIndex === max ? undefined : array[aroundIndex + 1];
|
|
||||||
return {previous, next};
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getPluginI18nPath({
|
|
||||||
siteDir,
|
|
||||||
locale,
|
|
||||||
pluginName,
|
|
||||||
pluginId = DEFAULT_PLUGIN_ID,
|
|
||||||
subPaths = [],
|
|
||||||
}: {
|
|
||||||
siteDir: string;
|
|
||||||
locale: string;
|
|
||||||
pluginName: string;
|
|
||||||
pluginId?: string | undefined;
|
|
||||||
subPaths?: string[];
|
|
||||||
}): string {
|
|
||||||
return path.join(
|
|
||||||
siteDir,
|
|
||||||
'i18n',
|
|
||||||
// namespace first by locale: convenient to work in a single folder for a
|
|
||||||
// translator
|
|
||||||
locale,
|
|
||||||
// Make it convenient to use for single-instance
|
|
||||||
// ie: return "docs", not "docs-default" nor "docs/default"
|
|
||||||
`${pluginName}${pluginId === DEFAULT_PLUGIN_ID ? '' : `-${pluginId}`}`,
|
|
||||||
...subPaths,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param permalink The URL that the HTML file corresponds to, without base URL
|
|
||||||
* @param outDir Full path to the output directory
|
|
||||||
* @param trailingSlash The site config option. If provided, only one path will
|
|
||||||
* be read.
|
|
||||||
* @returns This returns a buffer, which you have to decode string yourself if
|
|
||||||
* needed. (Not always necessary since the output isn't for human consumption
|
|
||||||
* anyways, and most HTML manipulation libs accept buffers)
|
|
||||||
*/
|
|
||||||
export async function readOutputHTMLFile(
|
|
||||||
permalink: string,
|
|
||||||
outDir: string,
|
|
||||||
trailingSlash: boolean | undefined,
|
|
||||||
): Promise<Buffer> {
|
|
||||||
const withTrailingSlashPath = path.join(outDir, permalink, 'index.html');
|
|
||||||
const withoutTrailingSlashPath = path.join(
|
|
||||||
outDir,
|
|
||||||
`${permalink.replace(/\/$/, '')}.html`,
|
|
||||||
);
|
|
||||||
if (trailingSlash) {
|
|
||||||
return fs.readFile(withTrailingSlashPath);
|
|
||||||
} else if (trailingSlash === false) {
|
|
||||||
return fs.readFile(withoutTrailingSlashPath);
|
|
||||||
}
|
|
||||||
const HTMLPath = await findAsyncSequential(
|
|
||||||
[withTrailingSlashPath, withoutTrailingSlashPath],
|
|
||||||
fs.pathExists,
|
|
||||||
);
|
|
||||||
if (!HTMLPath) {
|
|
||||||
throw new Error(
|
|
||||||
`Expected output HTML file to be found at ${withTrailingSlashPath}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
return fs.readFile(HTMLPath);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function mapAsyncSequential<T, R>(
|
|
||||||
array: T[],
|
|
||||||
action: (t: T) => Promise<R>,
|
|
||||||
): Promise<R[]> {
|
|
||||||
const results: R[] = [];
|
|
||||||
for (const t of array) {
|
|
||||||
const result = await action(t);
|
|
||||||
results.push(result);
|
|
||||||
}
|
|
||||||
return results;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function findAsyncSequential<T>(
|
|
||||||
array: T[],
|
|
||||||
predicate: (t: T) => Promise<boolean>,
|
|
||||||
): Promise<T | undefined> {
|
|
||||||
for (const t of array) {
|
|
||||||
if (await predicate(t)) {
|
|
||||||
return t;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function reportMessage(
|
|
||||||
message: string,
|
|
||||||
reportingSeverity: ReportingSeverity,
|
|
||||||
): void {
|
|
||||||
switch (reportingSeverity) {
|
|
||||||
case 'ignore':
|
|
||||||
break;
|
|
||||||
case 'log':
|
|
||||||
logger.info(message);
|
|
||||||
break;
|
|
||||||
case 'warn':
|
|
||||||
logger.warn(message);
|
|
||||||
break;
|
|
||||||
case 'error':
|
|
||||||
logger.error(message);
|
|
||||||
break;
|
|
||||||
case 'throw':
|
|
||||||
throw new Error(message);
|
|
||||||
default:
|
|
||||||
throw new Error(
|
|
||||||
`Unexpected "reportingSeverity" value: ${reportingSeverity}.`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function mergeTranslations(
|
|
||||||
contents: TranslationFileContent[],
|
|
||||||
): TranslationFileContent {
|
|
||||||
return contents.reduce((acc, content) => ({...acc, ...content}), {});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Useful to update all the messages of a translation file
|
|
||||||
// Used in tests to simulate translations
|
|
||||||
export function updateTranslationFileMessages(
|
|
||||||
translationFile: TranslationFile,
|
|
||||||
updateMessage: (message: string) => string,
|
|
||||||
): TranslationFile {
|
|
||||||
return {
|
|
||||||
...translationFile,
|
|
||||||
content: _.mapValues(translationFile.content, (translation) => ({
|
|
||||||
...translation,
|
|
||||||
message: updateMessage(translation.message),
|
|
||||||
})),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
88
packages/docusaurus-utils/src/jsUtils.ts
Normal file
88
packages/docusaurus-utils/src/jsUtils.ts
Normal file
|
@ -0,0 +1,88 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the MIT license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type {ReportingSeverity} from '@docusaurus/types';
|
||||||
|
import logger from '@docusaurus/logger';
|
||||||
|
|
||||||
|
export function removeSuffix(str: string, suffix: string): string {
|
||||||
|
if (suffix === '') {
|
||||||
|
return str; // always returns "" otherwise!
|
||||||
|
}
|
||||||
|
return str.endsWith(suffix) ? str.slice(0, -suffix.length) : str;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function removePrefix(str: string, prefix: string): string {
|
||||||
|
return str.startsWith(prefix) ? str.slice(prefix.length) : str;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getElementsAround<T>(
|
||||||
|
array: T[],
|
||||||
|
aroundIndex: number,
|
||||||
|
): {
|
||||||
|
next: T | undefined;
|
||||||
|
previous: T | undefined;
|
||||||
|
} {
|
||||||
|
const min = 0;
|
||||||
|
const max = array.length - 1;
|
||||||
|
if (aroundIndex < min || aroundIndex > max) {
|
||||||
|
throw new Error(
|
||||||
|
`Valid "aroundIndex" for array (of size ${array.length}) are between ${min} and ${max}, but you provided ${aroundIndex}.`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
const previous = aroundIndex === min ? undefined : array[aroundIndex - 1];
|
||||||
|
const next = aroundIndex === max ? undefined : array[aroundIndex + 1];
|
||||||
|
return {previous, next};
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function mapAsyncSequential<T, R>(
|
||||||
|
array: T[],
|
||||||
|
action: (t: T) => Promise<R>,
|
||||||
|
): Promise<R[]> {
|
||||||
|
const results: R[] = [];
|
||||||
|
for (const t of array) {
|
||||||
|
const result = await action(t);
|
||||||
|
results.push(result);
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function findAsyncSequential<T>(
|
||||||
|
array: T[],
|
||||||
|
predicate: (t: T) => Promise<boolean>,
|
||||||
|
): Promise<T | undefined> {
|
||||||
|
for (const t of array) {
|
||||||
|
if (await predicate(t)) {
|
||||||
|
return t;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function reportMessage(
|
||||||
|
message: string,
|
||||||
|
reportingSeverity: ReportingSeverity,
|
||||||
|
): void {
|
||||||
|
switch (reportingSeverity) {
|
||||||
|
case 'ignore':
|
||||||
|
break;
|
||||||
|
case 'log':
|
||||||
|
logger.info(message);
|
||||||
|
break;
|
||||||
|
case 'warn':
|
||||||
|
logger.warn(message);
|
||||||
|
break;
|
||||||
|
case 'error':
|
||||||
|
logger.error(message);
|
||||||
|
break;
|
||||||
|
case 'throw':
|
||||||
|
throw new Error(message);
|
||||||
|
default:
|
||||||
|
throw new Error(
|
||||||
|
`Unexpected "reportingSeverity" value: ${reportingSeverity}.`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
|
@ -5,10 +5,9 @@
|
||||||
* LICENSE file in the root directory of this source tree.
|
* LICENSE file in the root directory of this source tree.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
// Based on https://github.com/gatsbyjs/gatsby/pull/21518/files
|
|
||||||
|
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
|
|
||||||
|
// Based on https://github.com/gatsbyjs/gatsby/pull/21518/files
|
||||||
// MacOS (APFS) and Windows (NTFS) filename length limit = 255 chars,
|
// MacOS (APFS) and Windows (NTFS) filename length limit = 255 chars,
|
||||||
// Others = 255 bytes
|
// Others = 255 bytes
|
||||||
const MAX_PATH_SEGMENT_CHARS = 255;
|
const MAX_PATH_SEGMENT_CHARS = 255;
|
||||||
|
@ -113,3 +112,10 @@ export function escapePath(str: string): string {
|
||||||
// Remove the " around the json string;
|
// Remove the " around the json string;
|
||||||
return escaped.substring(1, escaped.length - 1);
|
return escaped.substring(1, escaped.length - 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function addTrailingPathSeparator(str: string): string {
|
||||||
|
return str.endsWith(path.sep)
|
||||||
|
? str
|
||||||
|
: // If this is Windows, we need to change the forward slash to backward
|
||||||
|
`${str.replace(/\/$/, '')}${path.sep}`;
|
||||||
|
}
|
||||||
|
|
|
@ -5,6 +5,9 @@
|
||||||
* LICENSE file in the root directory of this source tree.
|
* LICENSE file in the root directory of this source tree.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import {removeSuffix} from './jsUtils';
|
||||||
|
import resolvePathnameUnsafe from 'resolve-pathname';
|
||||||
|
|
||||||
export function normalizeUrl(rawUrls: string[]): string {
|
export function normalizeUrl(rawUrls: string[]): string {
|
||||||
const urls = [...rawUrls];
|
const urls = [...rawUrls];
|
||||||
const resultArray = [];
|
const resultArray = [];
|
||||||
|
@ -94,3 +97,53 @@ export function getEditUrl(
|
||||||
normalizeUrl([editUrl, fileRelativePath.replace(/\\/g, '/')])
|
normalizeUrl([editUrl, fileRelativePath.replace(/\\/g, '/')])
|
||||||
: undefined;
|
: undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert filepath to url path.
|
||||||
|
* Example: 'index.md' -> '/', 'foo/bar.js' -> '/foo/bar',
|
||||||
|
*/
|
||||||
|
export function fileToPath(file: string): string {
|
||||||
|
const indexRE = /(?<dirname>^|.*\/)index\.(?:mdx?|jsx?|tsx?)$/i;
|
||||||
|
const extRE = /\.(?:mdx?|jsx?|tsx?)$/;
|
||||||
|
|
||||||
|
if (indexRE.test(file)) {
|
||||||
|
return file.replace(indexRE, '/$1');
|
||||||
|
}
|
||||||
|
return `/${file.replace(extRE, '').replace(/\\/g, '/')}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function encodePath(userPath: string): string {
|
||||||
|
return userPath
|
||||||
|
.split('/')
|
||||||
|
.map((item) => encodeURIComponent(item))
|
||||||
|
.join('/');
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isValidPathname(str: string): boolean {
|
||||||
|
if (!str.startsWith('/')) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
// weird, but is there a better way?
|
||||||
|
const parsedPathname = new URL(str, 'https://domain.com').pathname;
|
||||||
|
return parsedPathname === str || parsedPathname === encodeURI(str);
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolve pathname and fail fast if resolution fails
/**
 * Resolves the pathname `to` against the optional base pathname `from`.
 * Thin wrapper around the `resolve-pathname` package.
 * NOTE(review): the "fail fast" behavior comes from that package's
 * semantics — confirm against its docs if relying on it.
 */
export function resolvePathname(to: string, from?: string): string {
  return resolvePathnameUnsafe(to, from);
}
|
||||||
|
export function addLeadingSlash(str: string): string {
|
||||||
|
return str.startsWith('/') ? str : `/${str}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO deduplicate: also present in @docusaurus/utils-common
|
||||||
|
export function addTrailingSlash(str: string): string {
|
||||||
|
return str.endsWith('/') ? str : `${str}/`;
|
||||||
|
}
|
||||||
|
export function removeTrailingSlash(str: string): string {
|
||||||
|
return removeSuffix(str, '/');
|
||||||
|
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue