Mirror of https://github.com/facebook/docusaurus.git (synced 2025-04-30 02:37:59 +02:00)
refactor(utils): reorganize functions; move authors file resolution to utils (#6229)

* refactor(utils): reorganize functions; move authors file resolution to utils
* More refactor

Parent: 7adc1c0cdb
Commit: 24d65d9bdd

39 changed files with 533 additions and 747 deletions
@@ -36,7 +36,7 @@
    "eslint-plugin-jsx-a11y": "^6.5.1",
    "eslint-plugin-react": "^7.27.0",
    "eslint-plugin-react-hooks": "^4.3.0",
    "prettier": "^2.5.0",
    "prettier": "^2.5.1",
    "stylelint": "^13.2.1"
  },
  "browserslist": {
@@ -52,4 +52,4 @@
    ]
  },
  "description": "Docusaurus example project (facebook template)"
}
}

@@ -109,7 +109,7 @@
    "netlify-cli": "^8.0.5",
    "nodemon": "^2.0.13",
    "npm-run-all": "^4.1.5",
    "prettier": "^2.5.0",
    "prettier": "^2.5.1",
    "react": "^17.0.1",
    "react-dom": "^17.0.1",
    "react-test-renderer": "^17.0.2",

@@ -35,7 +35,7 @@
    "eslint-plugin-jsx-a11y": "^6.5.1",
    "eslint-plugin-react": "^7.27.0",
    "eslint-plugin-react-hooks": "^4.3.0",
    "prettier": "^2.5.0",
    "prettier": "^2.5.1",
    "stylelint": "^13.2.1"
  },
  "browserslist": {

@@ -27,7 +27,6 @@
    "feed": "^4.2.2",
    "fs-extra": "^10.0.0",
    "globby": "^11.0.2",
    "js-yaml": "^4.0.0",
    "loader-utils": "^2.0.0",
    "lodash": "^4.17.20",
    "reading-time": "^1.5.0",
@@ -6,12 +6,10 @@
 */

import {
  AuthorsMap,
  getAuthorsMapFilePath,
  validateAuthorsMapFile,
  readAuthorsMapFile,
  type AuthorsMap,
  getAuthorsMap,
  getBlogPostAuthors,
  validateAuthorsMap,
} from '../authors';
import path from 'path';

@@ -282,80 +280,6 @@ describe('getBlogPostAuthors', () => {
  });
});

describe('readAuthorsMapFile', () => {
  const fixturesDir = path.join(__dirname, '__fixtures__/authorsMapFiles');

  test('read valid yml author file', async () => {
    const filePath = path.join(fixturesDir, 'authors.yml');
    expect(await readAuthorsMapFile(filePath)).toBeDefined();
  });

  test('read valid json author file', async () => {
    const filePath = path.join(fixturesDir, 'authors.json');
    expect(await readAuthorsMapFile(filePath)).toBeDefined();
  });

  test('read yml and json should lead to the same result', async () => {
    const content1 = await readAuthorsMapFile(
      path.join(fixturesDir, 'authors.yml'),
    );
    const content2 = await readAuthorsMapFile(
      path.join(fixturesDir, 'authors.json'),
    );
    expect(content1).toEqual(content2);
  });

  test('fail to read invalid yml 1', async () => {
    const filePath = path.join(fixturesDir, 'authorsBad1.yml');
    await expect(
      readAuthorsMapFile(filePath),
    ).rejects.toThrowErrorMatchingInlineSnapshot(
      `"\\"slorber.name\\" is required"`,
    );
  });
  test('fail to read invalid json 1', async () => {
    const filePath = path.join(fixturesDir, 'authorsBad1.json');
    await expect(
      readAuthorsMapFile(filePath),
    ).rejects.toThrowErrorMatchingInlineSnapshot(
      `"\\"slorber.name\\" is required"`,
    );
  });

  test('fail to read invalid yml 2', async () => {
    const filePath = path.join(fixturesDir, 'authorsBad2.yml');
    await expect(
      readAuthorsMapFile(filePath),
    ).rejects.toThrowErrorMatchingInlineSnapshot(
      `"\\"name\\" must be of type object"`,
    );
  });
  test('fail to read invalid json 2', async () => {
    const filePath = path.join(fixturesDir, 'authorsBad2.json');
    await expect(
      readAuthorsMapFile(filePath),
    ).rejects.toThrowErrorMatchingInlineSnapshot(
      `"\\"name\\" must be of type object"`,
    );
  });

  test('fail to read invalid yml 3', async () => {
    const filePath = path.join(fixturesDir, 'authorsBad3.yml');
    await expect(
      readAuthorsMapFile(filePath),
    ).rejects.toThrowErrorMatchingInlineSnapshot(
      `"\\"value\\" must be of type object"`,
    );
  });
  test('fail to read invalid json 3', async () => {
    const filePath = path.join(fixturesDir, 'authorsBad3.json');
    await expect(
      readAuthorsMapFile(filePath),
    ).rejects.toThrowErrorMatchingInlineSnapshot(
      `"\\"value\\" must be of type object"`,
    );
  });
});
describe('getAuthorsMap', () => {
  const fixturesDir = path.join(__dirname, '__fixtures__/authorsMapFiles');
  const contentPaths = {
@@ -391,7 +315,7 @@ describe('getAuthorsMap', () => {
  });
});

describe('validateAuthorsMapFile', () => {
describe('validateAuthorsMap', () => {
  test('accept valid authors map', () => {
    const authorsMap: AuthorsMap = {
      slorber: {
@@ -411,7 +335,7 @@ describe('validateAuthorsMapFile', () => {
        hello: new Date(),
      },
    };
    expect(validateAuthorsMapFile(authorsMap)).toEqual(authorsMap);
    expect(validateAuthorsMap(authorsMap)).toEqual(authorsMap);
  });

  test('rename snake case image_url to camelCase imageURL', () => {
@@ -421,7 +345,7 @@ describe('validateAuthorsMapFile', () => {
        image_url: 'https://github.com/slorber.png',
      },
    };
    expect(validateAuthorsMapFile(authorsMap)).toEqual({
    expect(validateAuthorsMap(authorsMap)).toEqual({
      slorber: {
        name: 'Sébastien Lorber',
        imageURL: 'https://github.com/slorber.png',
@@ -436,13 +360,13 @@ describe('validateAuthorsMapFile', () => {
      },
    };
    expect(() =>
      validateAuthorsMapFile(authorsMap),
      validateAuthorsMap(authorsMap),
    ).toThrowErrorMatchingInlineSnapshot(`"\\"slorber.name\\" is required"`);
  });

  test('reject undefined author', () => {
    expect(() =>
      validateAuthorsMapFile({
      validateAuthorsMap({
        slorber: undefined,
      }),
    ).toThrowErrorMatchingInlineSnapshot(`"\\"slorber\\" is required"`);
@@ -450,7 +374,7 @@ describe('validateAuthorsMapFile', () => {

  test('reject null author', () => {
    expect(() =>
      validateAuthorsMapFile({
      validateAuthorsMap({
        slorber: null,
      }),
    ).toThrowErrorMatchingInlineSnapshot(
@@ -460,14 +384,14 @@ describe('validateAuthorsMapFile', () => {

  test('reject array author', () => {
    expect(() =>
      validateAuthorsMapFile({slorber: []}),
      validateAuthorsMap({slorber: []}),
    ).toThrowErrorMatchingInlineSnapshot(
      `"\\"slorber\\" must be of type object"`,
    );
  });

  test('reject array content', () => {
    expect(() => validateAuthorsMapFile([])).toThrowErrorMatchingInlineSnapshot(
    expect(() => validateAuthorsMap([])).toThrowErrorMatchingInlineSnapshot(
      // TODO improve this error message
      `"\\"value\\" must be of type object"`,
    );
@@ -475,7 +399,7 @@ describe('validateAuthorsMapFile', () => {

  test('reject flat author', () => {
    expect(() =>
      validateAuthorsMapFile({name: 'Sébastien'}),
      validateAuthorsMap({name: 'Sébastien'}),
    ).toThrowErrorMatchingInlineSnapshot(
      // TODO improve this error message
      `"\\"name\\" must be of type object"`,
@@ -488,121 +412,9 @@ describe('validateAuthorsMapFile', () => {
      slorber: [],
    };
    expect(() =>
      validateAuthorsMapFile(authorsMap),
      validateAuthorsMap(authorsMap),
    ).toThrowErrorMatchingInlineSnapshot(
      `"\\"slorber\\" must be of type object"`,
    );
  });
});

describe('getAuthorsMapFilePath', () => {
  const fixturesDir = path.join(
    __dirname,
    '__fixtures__/getAuthorsMapFilePath',
  );
  const contentPathYml1 = path.join(fixturesDir, 'contentPathYml1');
  const contentPathYml2 = path.join(fixturesDir, 'contentPathYml2');
  const contentPathJson1 = path.join(fixturesDir, 'contentPathJson1');
  const contentPathJson2 = path.join(fixturesDir, 'contentPathJson2');
  const contentPathEmpty = path.join(fixturesDir, 'contentPathEmpty');
  const contentPathNestedYml = path.join(fixturesDir, 'contentPathNestedYml');

  test('getAuthorsMapFilePath returns localized Yml path in priority', async () => {
    expect(
      await getAuthorsMapFilePath({
        authorsMapPath: 'authors.yml',
        contentPaths: {
          contentPathLocalized: contentPathYml1,
          contentPath: contentPathYml2,
        },
      }),
    ).toEqual(path.join(contentPathYml1, 'authors.yml'));
    expect(
      await getAuthorsMapFilePath({
        authorsMapPath: 'authors.yml',
        contentPaths: {
          contentPathLocalized: contentPathYml2,
          contentPath: contentPathYml1,
        },
      }),
    ).toEqual(path.join(contentPathYml2, 'authors.yml'));
  });

  test('getAuthorsMapFilePath returns localized Json path in priority', async () => {
    expect(
      await getAuthorsMapFilePath({
        authorsMapPath: 'authors.json',
        contentPaths: {
          contentPathLocalized: contentPathJson1,
          contentPath: contentPathJson2,
        },
      }),
    ).toEqual(path.join(contentPathJson1, 'authors.json'));
    expect(
      await getAuthorsMapFilePath({
        authorsMapPath: 'authors.json',
        contentPaths: {
          contentPathLocalized: contentPathJson2,
          contentPath: contentPathJson1,
        },
      }),
    ).toEqual(path.join(contentPathJson2, 'authors.json'));
  });

  test('getAuthorsMapFilePath returns unlocalized Yml path as fallback', async () => {
    expect(
      await getAuthorsMapFilePath({
        authorsMapPath: 'authors.yml',
        contentPaths: {
          contentPathLocalized: contentPathEmpty,
          contentPath: contentPathYml2,
        },
      }),
    ).toEqual(path.join(contentPathYml2, 'authors.yml'));
  });

  test('getAuthorsMapFilePath returns unlocalized Json path as fallback', async () => {
    expect(
      await getAuthorsMapFilePath({
        authorsMapPath: 'authors.json',
        contentPaths: {
          contentPathLocalized: contentPathEmpty,
          contentPath: contentPathJson1,
        },
      }),
    ).toEqual(path.join(contentPathJson1, 'authors.json'));
  });

  test('getAuthorsMapFilePath can return undefined (file not found)', async () => {
    expect(
      await getAuthorsMapFilePath({
        authorsMapPath: 'authors.json',
        contentPaths: {
          contentPathLocalized: contentPathEmpty,
          contentPath: contentPathYml1,
        },
      }),
    ).toBeUndefined();
    expect(
      await getAuthorsMapFilePath({
        authorsMapPath: 'authors.yml',
        contentPaths: {
          contentPathLocalized: contentPathEmpty,
          contentPath: contentPathJson1,
        },
      }),
    ).toBeUndefined();
  });

  test('getAuthorsMapFilePath can return nested path', async () => {
    expect(
      await getAuthorsMapFilePath({
        authorsMapPath: 'sub/folder/authors.yml',
        contentPaths: {
          contentPathLocalized: contentPathEmpty,
          contentPath: contentPathNestedYml,
        },
      }),
    ).toEqual(path.join(contentPathNestedYml, 'sub/folder/authors.yml'));
  });
});
@@ -5,19 +5,14 @@
 * LICENSE file in the root directory of this source tree.
 */

import fs from 'fs-extra';
import logger from '@docusaurus/logger';
import path from 'path';
import {Author, BlogContentPaths} from './types';
import {findFolderContainingFile} from '@docusaurus/utils';
import {getDataFileData} from '@docusaurus/utils';
import {Joi, URISchema} from '@docusaurus/utils-validation';
import {
  BlogPostFrontMatter,
  BlogPostFrontMatterAuthor,
  BlogPostFrontMatterAuthors,
} from './blogFrontMatter';
import {getContentPathList} from './blogUtils';
import Yaml from 'js-yaml';

export type AuthorsMap = Record<string, Author>;

@@ -34,63 +29,22 @@ const AuthorsMapSchema = Joi.object<AuthorsMap>().pattern(
    .required(),
);

export function validateAuthorsMapFile(content: unknown): AuthorsMap {
export function validateAuthorsMap(content: unknown): AuthorsMap {
  return Joi.attempt(content, AuthorsMapSchema);
}

export async function readAuthorsMapFile(
  filePath: string,
): Promise<AuthorsMap | undefined> {
  if (await fs.pathExists(filePath)) {
    const contentString = await fs.readFile(filePath, {encoding: 'utf8'});
    try {
      const unsafeContent = Yaml.load(contentString);
      return validateAuthorsMapFile(unsafeContent);
    } catch (e) {
      // TODO replace later by error cause: see https://v8.dev/features/error-cause
      logger.error('The author list file looks invalid!');
      throw e;
    }
  }
  return undefined;
}

type AuthorsMapParams = {
export async function getAuthorsMap(params: {
  authorsMapPath: string;
  contentPaths: BlogContentPaths;
};

export async function getAuthorsMapFilePath({
  authorsMapPath,
  contentPaths,
}: AuthorsMapParams): Promise<string | undefined> {
  // Useful to load an eventually localize authors map
  const contentPath = await findFolderContainingFile(
    getContentPathList(contentPaths),
    authorsMapPath,
}): Promise<AuthorsMap | undefined> {
  return getDataFileData(
    {
      filePath: params.authorsMapPath,
      contentPaths: params.contentPaths,
      fileType: 'authors map',
    },
    validateAuthorsMap,
  );

  if (contentPath) {
    return path.join(contentPath, authorsMapPath);
  }

  return undefined;
}

export async function getAuthorsMap(
  params: AuthorsMapParams,
): Promise<AuthorsMap | undefined> {
  const filePath = await getAuthorsMapFilePath(params);
  if (!filePath) {
    return undefined;
  }
  try {
    return await readAuthorsMapFile(filePath);
  } catch (e) {
    // TODO replace later by error cause, see https://v8.dev/features/error-cause
    logger.error`Couldn't read blog authors map at path=${filePath}`;
    throw e;
  }
}

type AuthorsParam = {
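With this refactor, getAuthorsMap is a thin wrapper over the generic getDataFileData helper. A minimal usage sketch, assuming only the signatures shown in this diff (the option value and content paths below are hypothetical):

import {getAuthorsMap, type AuthorsMap} from './authors';

async function loadBlogAuthors(): Promise<AuthorsMap | undefined> {
  // Resolves to undefined when no authors file exists, and throws when the
  // file fails AuthorsMapSchema validation. The localized content path is
  // tried before the regular one.
  return getAuthorsMap({
    authorsMapPath: 'authors.yml', // hypothetical option value
    contentPaths: {
      // hypothetical site layout
      contentPathLocalized: 'website/i18n/fr/docusaurus-plugin-content-blog',
      contentPath: 'website/blog',
    },
  });
}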
@@ -28,6 +28,7 @@ import {
  Globby,
  normalizeFrontMatterTags,
  groupTaggedItems,
  getContentPathList,
} from '@docusaurus/utils';
import {LoadContext} from '@docusaurus/types';
import {validateBlogPostFrontMatter} from './blogFrontMatter';
@@ -318,8 +319,3 @@ export function linkify({

  return newContent;
}

// Order matters: we look in priority in localized folder
export function getContentPathList(contentPaths: BlogContentPaths): string[] {
  return [contentPaths.contentPathLocalized, contentPaths.contentPath];
}
@@ -16,6 +16,8 @@ import {
  posixPath,
  addTrailingPathSeparator,
  createAbsoluteFilePathMatcher,
  getContentPathList,
  getDataFilePath,
  DEFAULT_PLUGIN_ID,
} from '@docusaurus/utils';
import {translateContent, getTranslationFiles} from './translations';
@@ -45,13 +47,11 @@ import {
import {Configuration} from 'webpack';
import {
  generateBlogPosts,
  getContentPathList,
  getSourceToPermalink,
  getBlogTags,
} from './blogUtils';
import {BlogPostFrontMatter} from './blogFrontMatter';
import {createBlogFeedFiles} from './feed';
import {getAuthorsMapFilePath} from './authors';

export default async function pluginContentBlog(
  context: LoadContext,
@@ -90,8 +90,8 @@ export default async function pluginContentBlog(
  const aliasedSource = (source: string) =>
    `~blog/${posixPath(path.relative(pluginDataDirRoot, source))}`;

  const authorsMapFilePath = await getAuthorsMapFilePath({
    authorsMapPath: options.authorsMapPath,
  const authorsMapFilePath = await getDataFilePath({
    filePath: options.authorsMapPath,
    contentPaths,
  });

@@ -21,12 +21,12 @@
    "@docusaurus/logger": "2.0.0-beta.14",
    "@mdx-js/runtime": "^1.6.22",
    "@svgr/webpack": "^6.0.0",
    "escape-string-regexp": "^4.0.0",
    "file-loader": "^6.2.0",
    "fs-extra": "^10.0.0",
    "github-slugger": "^1.4.0",
    "globby": "^11.0.4",
    "gray-matter": "^4.0.3",
    "js-yaml": "^4.0.0",
    "lodash": "^4.17.20",
    "micromatch": "^4.0.4",
    "remark-mdx-remove-exports": "^1.6.22",
@@ -0,0 +1 @@
{"a": 2}

@@ -0,0 +1 @@
a: 2

@@ -0,0 +1 @@
{"a": 1}

@@ -0,0 +1 @@
a: 1
@@ -1,8 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`getFolderContainingFile throw if no folder contain such file 1`] = `
"File \\"index.test.ts\\" does not exist in any of these folders:
- /abcdef
- /gehij
- /klmn]"
`;
packages/docusaurus-utils/src/__tests__/dataFileUtils.test.ts (new file, 202 lines)
@@ -0,0 +1,202 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import path from 'path';
import {
  findFolderContainingFile,
  getFolderContainingFile,
  getDataFilePath,
  getDataFileData,
} from '../dataFileUtils';

describe('getDataFilePath', () => {
  const fixturesDir = path.join(__dirname, '__fixtures__/dataFiles');
  const contentPathYml1 = path.join(fixturesDir, 'contentPathYml1');
  const contentPathYml2 = path.join(fixturesDir, 'contentPathYml2');
  const contentPathJson1 = path.join(fixturesDir, 'contentPathJson1');
  const contentPathJson2 = path.join(fixturesDir, 'contentPathJson2');
  const contentPathEmpty = path.join(fixturesDir, 'contentPathEmpty');
  const contentPathNestedYml = path.join(fixturesDir, 'contentPathNestedYml');

  test('getDataFilePath returns localized Yml path in priority', async () => {
    expect(
      await getDataFilePath({
        filePath: 'authors.yml',
        contentPaths: {
          contentPathLocalized: contentPathYml1,
          contentPath: contentPathYml2,
        },
      }),
    ).toEqual(path.join(contentPathYml1, 'authors.yml'));
    expect(
      await getDataFilePath({
        filePath: 'authors.yml',
        contentPaths: {
          contentPathLocalized: contentPathYml2,
          contentPath: contentPathYml1,
        },
      }),
    ).toEqual(path.join(contentPathYml2, 'authors.yml'));
  });

  test('getDataFilePath returns localized Json path in priority', async () => {
    expect(
      await getDataFilePath({
        filePath: 'authors.json',
        contentPaths: {
          contentPathLocalized: contentPathJson1,
          contentPath: contentPathJson2,
        },
      }),
    ).toEqual(path.join(contentPathJson1, 'authors.json'));
    expect(
      await getDataFilePath({
        filePath: 'authors.json',
        contentPaths: {
          contentPathLocalized: contentPathJson2,
          contentPath: contentPathJson1,
        },
      }),
    ).toEqual(path.join(contentPathJson2, 'authors.json'));
  });

  test('getDataFilePath returns unlocalized Yml path as fallback', async () => {
    expect(
      await getDataFilePath({
        filePath: 'authors.yml',
        contentPaths: {
          contentPathLocalized: contentPathEmpty,
          contentPath: contentPathYml2,
        },
      }),
    ).toEqual(path.join(contentPathYml2, 'authors.yml'));
  });

  test('getDataFilePath returns unlocalized Json path as fallback', async () => {
    expect(
      await getDataFilePath({
        filePath: 'authors.json',
        contentPaths: {
          contentPathLocalized: contentPathEmpty,
          contentPath: contentPathJson1,
        },
      }),
    ).toEqual(path.join(contentPathJson1, 'authors.json'));
  });

  test('getDataFilePath can return undefined (file not found)', async () => {
    expect(
      await getDataFilePath({
        filePath: 'authors.json',
        contentPaths: {
          contentPathLocalized: contentPathEmpty,
          contentPath: contentPathYml1,
        },
      }),
    ).toBeUndefined();
    expect(
      await getDataFilePath({
        filePath: 'authors.yml',
        contentPaths: {
          contentPathLocalized: contentPathEmpty,
          contentPath: contentPathJson1,
        },
      }),
    ).toBeUndefined();
  });

  test('getDataFilePath can return nested path', async () => {
    expect(
      await getDataFilePath({
        filePath: 'sub/folder/authors.yml',
        contentPaths: {
          contentPathLocalized: contentPathEmpty,
          contentPath: contentPathNestedYml,
        },
      }),
    ).toEqual(path.join(contentPathNestedYml, 'sub/folder/authors.yml'));
  });
});

describe('getDataFileData', () => {
  const fixturesDir = path.join(__dirname, '__fixtures__/dataFiles/actualData');
  function readDataFile(filePath: string) {
    return getDataFileData(
      {
        filePath,
        contentPaths: {contentPath: fixturesDir, contentPathLocalized: ''},
        fileType: 'test',
      },
      (content) => {
        // @ts-expect-error: good enough
        if (content.a !== 1) {
          throw new Error('Nope');
        }
        return content;
      },
    );
  }

  test('read valid yml author file', async () => {
    await expect(readDataFile('valid.yml')).resolves.toEqual({a: 1});
  });

  test('read valid json author file', async () => {
    await expect(readDataFile('valid.json')).resolves.toEqual({a: 1});
  });

  test('fail to read invalid yml', async () => {
    await expect(
      readDataFile('bad.yml'),
    ).rejects.toThrowErrorMatchingInlineSnapshot(`"Nope"`);
  });

  test('fail to read invalid json', async () => {
    await expect(
      readDataFile('bad.json'),
    ).rejects.toThrowErrorMatchingInlineSnapshot(`"Nope"`);
  });
});

describe('findFolderContainingFile', () => {
  test('find appropriate folder', async () => {
    await expect(
      findFolderContainingFile(
        ['/abcdef', '/gehij', __dirname, '/klmn'],
        'index.test.ts',
      ),
    ).resolves.toEqual(__dirname);
  });

  test('return undefined if no folder contain such file', async () => {
    await expect(
      findFolderContainingFile(['/abcdef', '/gehij', '/klmn'], 'index.test.ts'),
    ).resolves.toBeUndefined();
  });
});

describe('getFolderContainingFile', () => {
  test('get appropriate folder', async () => {
    await expect(
      getFolderContainingFile(
        ['/abcdef', '/gehij', __dirname, '/klmn'],
        'index.test.ts',
      ),
    ).resolves.toEqual(__dirname);
  });

  test('throw if no folder contain such file', async () => {
    await expect(
      getFolderContainingFile(['/abcdef', '/gehij', '/klmn'], 'index.test.ts'),
    ).rejects.toThrowErrorMatchingInlineSnapshot(`
      "File \\"index.test.ts\\" does not exist in any of these folders:
      - /abcdef
      - /gehij
      - /klmn]"
    `);
  });
});
@@ -1,25 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import {escapePath} from '../escapePath';

describe('escapePath', () => {
  test('escapePath works', () => {
    const asserts: Record<string, string> = {
      'c:/aaaa\\bbbb': 'c:/aaaa\\\\bbbb',
      'c:\\aaaa\\bbbb\\★': 'c:\\\\aaaa\\\\bbbb\\\\★',
      '\\\\?\\c:\\aaaa\\bbbb': '\\\\\\\\?\\\\c:\\\\aaaa\\\\bbbb',
      'c:\\aaaa\\bbbb': 'c:\\\\aaaa\\\\bbbb',
      'foo\\bar': 'foo\\\\bar',
      'foo\\bar/lol': 'foo\\\\bar/lol',
      'website\\docs/**/*.{md,mdx}': 'website\\\\docs/**/*.{md,mdx}',
    };
    Object.keys(asserts).forEach((file) => {
      expect(escapePath(file)).toBe(asserts[file]);
    });
  });
});
@@ -5,16 +5,9 @@
 * LICENSE file in the root directory of this source tree.
 */

import path from 'path';
import {
  fileToPath,
  genComponentName,
  genChunkName,
  idx,
  getSubFolder,
  posixPath,
  objectWithKeySorted,
  aliasedSitePath,
  isValidPathname,
  addTrailingSlash,
  removeTrailingSlash,
@@ -23,47 +16,14 @@ import {
  addLeadingSlash,
  getElementsAround,
  mergeTranslations,
  mapAsyncSequencial,
  mapAsyncSequential,
  findAsyncSequential,
  findFolderContainingFile,
  getFolderContainingFile,
  updateTranslationFileMessages,
  parseMarkdownHeadingId,
} from '../index';
import {sum} from 'lodash';

describe('load utils', () => {
  test('aliasedSitePath', () => {
    const asserts: Record<string, string> = {
      'user/website/docs/asd.md': '@site/docs/asd.md',
      'user/website/versioned_docs/foo/bar.md':
        '@site/versioned_docs/foo/bar.md',
      'user/docs/test.md': '@site/../docs/test.md',
    };
    Object.keys(asserts).forEach((file) => {
      expect(posixPath(aliasedSitePath(file, 'user/website'))).toBe(
        asserts[file],
      );
    });
  });

  test('genComponentName', () => {
    const asserts: Record<string, string> = {
      '/': 'index',
      '/foo-bar': 'FooBar096',
      '/foo/bar': 'FooBar1Df',
      '/blog/2017/12/14/introducing-docusaurus':
        'Blog20171214IntroducingDocusaurus8D2',
      '/blog/2017/12/14-introducing-docusaurus':
        'Blog20171214IntroducingDocusaurus0Bc',
      '/blog/201712/14-introducing-docusaurus':
        'Blog20171214IntroducingDocusaurusA93',
    };
    Object.keys(asserts).forEach((file) => {
      expect(genComponentName(file)).toBe(asserts[file]);
    });
  });

  test('fileToPath', () => {
    const asserts: Record<string, string> = {
      'index.md': '/',
@@ -80,41 +40,6 @@ describe('load utils', () => {
    });
  });

  test('objectWithKeySorted', () => {
    const obj = {
      '/docs/adding-blog': '4',
      '/docs/versioning': '5',
      '/': '1',
      '/blog/2018': '3',
      '/youtube': '7',
      '/users/en/': '6',
      '/blog': '2',
    };
    expect(objectWithKeySorted(obj)).toMatchInlineSnapshot(`
      Object {
        "/": "1",
        "/blog": "2",
        "/blog/2018": "3",
        "/docs/adding-blog": "4",
        "/docs/versioning": "5",
        "/users/en/": "6",
        "/youtube": "7",
      }
    `);
    const obj2 = {
      b: 'foo',
      c: 'bar',
      a: 'baz',
    };
    expect(objectWithKeySorted(obj2)).toMatchInlineSnapshot(`
      Object {
        "a": "baz",
        "b": "foo",
        "c": "bar",
      }
    `);
  });

  test('genChunkName', () => {
    const firstAssert: Record<string, string> = {
      '/docs/adding-blog': 'docs-adding-blog-062',
@@ -159,64 +84,6 @@ describe('load utils', () => {
    expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091');
  });

  test('idx', () => {
    const a = {};
    const b = {hello: 'world'};
    const obj = {
      translation: {
        enabled: true,
        enabledLanguages: [
          {
            enabled: true,
            name: 'English',
            tag: 'en',
          },
          {
            enabled: true,
            name: '日本語',
            tag: 'ja',
          },
        ],
      },
      versioning: {
        enabled: false,
        versions: [],
      },
    };
    const test = {arr: [1, 2, 3]};
    const variable = 'enabledLanguages';
    expect(idx(a, ['b', 'c'])).toBeUndefined();
    expect(idx(b, ['hello'])).toEqual('world');
    expect(idx(b, 'hello')).toEqual('world');
    expect(idx(obj, 'typo')).toBeUndefined();
    expect(idx(obj, 'versioning')).toEqual({
      enabled: false,
      versions: [],
    });
    expect(idx(obj, ['translation', 'enabled'])).toEqual(true);
    expect(
      idx(obj, ['translation', variable]).map(
        (lang: {tag: string}) => lang.tag,
      ),
    ).toEqual(['en', 'ja']);
    expect(idx(test, ['arr', 0])).toEqual(1);
    expect(idx(undefined)).toBeUndefined();
    expect(idx(null)).toBeNull();
  });

  test('getSubFolder', () => {
    const testA = path.join('folder', 'en', 'test.md');
    const testB = path.join('folder', 'ja', 'test.md');
    const testC = path.join('folder', 'ja', 'en', 'test.md');
    const testD = path.join('docs', 'ro', 'test.md');
    const testE = path.join('docs', 'test.md');
    expect(getSubFolder(testA, 'folder')).toBe('en');
    expect(getSubFolder(testB, 'folder')).toBe('ja');
    expect(getSubFolder(testC, 'folder')).toBe('ja');
    expect(getSubFolder(testD, 'docs')).toBe('ro');
    expect(getSubFolder(testE, 'docs')).toBeNull();
  });

  test('isValidPathname', () => {
    expect(isValidPathname('/')).toBe(true);
    expect(isValidPathname('/hey')).toBe(true);
@@ -349,7 +216,7 @@ describe('mergeTranslations', () => {
  });
});

describe('mapAsyncSequencial', () => {
describe('mapAsyncSequential', () => {
  function sleep(timeout: number): Promise<void> {
    return new Promise((resolve) => {
      setTimeout(resolve, timeout);
@@ -369,7 +236,7 @@ describe('mapAsyncSequencial', () => {

    const timeBefore = Date.now();
    await expect(
      mapAsyncSequencial(items, async (item) => {
      mapAsyncSequential(items, async (item) => {
        const itemTimeout = itemToTimeout[item];
        itemMapStartsAt[item] = Date.now();
        await sleep(itemTimeout);
@@ -419,40 +286,6 @@ describe('findAsyncSequencial', () => {
  });
});

describe('findFolderContainingFile', () => {
  test('find appropriate folder', async () => {
    await expect(
      findFolderContainingFile(
        ['/abcdef', '/gehij', __dirname, '/klmn'],
        'index.test.ts',
      ),
    ).resolves.toEqual(__dirname);
  });

  test('return undefined if no folder contain such file', async () => {
    await expect(
      findFolderContainingFile(['/abcdef', '/gehij', '/klmn'], 'index.test.ts'),
    ).resolves.toBeUndefined();
  });
});

describe('getFolderContainingFile', () => {
  test('get appropriate folder', async () => {
    await expect(
      getFolderContainingFile(
        ['/abcdef', '/gehij', __dirname, '/klmn'],
        'index.test.ts',
      ),
    ).resolves.toEqual(__dirname);
  });

  test('throw if no folder contain such file', async () => {
    await expect(
      getFolderContainingFile(['/abcdef', '/gehij', '/klmn'], 'index.test.ts'),
    ).rejects.toThrowErrorMatchingSnapshot();
  });
});

describe('updateTranslationFileMessages', () => {
  test('should update messages', () => {
    expect(
@@ -5,10 +5,16 @@
 * LICENSE file in the root directory of this source tree.
 */

import {isNameTooLong, shortName} from '../pathUtils';
import {
  isNameTooLong,
  shortName,
  escapePath,
  posixPath,
  aliasedSitePath,
} from '../pathUtils';

describe('pathUtils', () => {
  test('isNameTooLong', () => {
describe('isNameTooLong', () => {
  test('behaves correctly', () => {
    const asserts: Record<string, boolean> = {
      '': false,
      'foo-bar-096': false,
@@ -26,40 +32,90 @@ describe('pathUtils', () => {
      expect(isNameTooLong(path)).toBe(asserts[path]);
    });
  });
});

  describe('shortName', () => {
    test('works', () => {
      const asserts: Record<string, string> = {
        '': '',
        'foo-bar': 'foo-bar',
        'endi-lie': 'endi-lie',
        'yangshun-tay': 'yangshun-tay',
        'foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar':
          'foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-',
        'foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-test-1-test-2':
          'foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-test-1-test-',
      };
      Object.keys(asserts).forEach((file) => {
        expect(shortName(file)).toBe(asserts[file]);
      });
describe('shortName', () => {
  test('works', () => {
    const asserts: Record<string, string> = {
      '': '',
      'foo-bar': 'foo-bar',
      'endi-lie': 'endi-lie',
      'yangshun-tay': 'yangshun-tay',
      'foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar':
        'foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-',
      'foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-test-1-test-2':
        'foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-test-1-test-',
    };
    Object.keys(asserts).forEach((file) => {
      expect(shortName(file)).toBe(asserts[file]);
    });
  });

    // Based on https://github.com/gatsbyjs/gatsby/pull/21518/files
  // Based on https://github.com/gatsbyjs/gatsby/pull/21518/files

    const SHORT_PATH = `/short/path/without/trailing/slash`;
    const VERY_LONG_PATH = `/${`x`.repeat(256)}/`;
    const VERY_LONG_PATH_NON_LATIN = `/${`あ`.repeat(255)}/`;
  const SHORT_PATH = `/short/path/without/trailing/slash`;
  const VERY_LONG_PATH = `/${`x`.repeat(256)}/`;
  const VERY_LONG_PATH_NON_LATIN = `/${`あ`.repeat(255)}/`;

    it(`Truncates long paths correctly`, () => {
      const truncatedPathLatin = shortName(VERY_LONG_PATH);
      const truncatedPathNonLatin = shortName(VERY_LONG_PATH_NON_LATIN);
      expect(truncatedPathLatin.length).toBeLessThanOrEqual(255);
      expect(truncatedPathNonLatin.length).toBeLessThanOrEqual(255);
    });
  test('Truncates long paths correctly', () => {
    const truncatedPathLatin = shortName(VERY_LONG_PATH);
    const truncatedPathNonLatin = shortName(VERY_LONG_PATH_NON_LATIN);
    expect(truncatedPathLatin.length).toBeLessThanOrEqual(255);
    expect(truncatedPathNonLatin.length).toBeLessThanOrEqual(255);
  });

    it(`Does not truncate short paths`, () => {
      const truncatedPath = shortName(SHORT_PATH);
      expect(truncatedPath).toEqual(SHORT_PATH);
  test('Does not truncate short paths', () => {
    const truncatedPath = shortName(SHORT_PATH);
    expect(truncatedPath).toEqual(SHORT_PATH);
  });
});

describe('escapePath', () => {
  test('escapePath works', () => {
    const asserts: Record<string, string> = {
      'c:/aaaa\\bbbb': 'c:/aaaa\\\\bbbb',
      'c:\\aaaa\\bbbb\\★': 'c:\\\\aaaa\\\\bbbb\\\\★',
      '\\\\?\\c:\\aaaa\\bbbb': '\\\\\\\\?\\\\c:\\\\aaaa\\\\bbbb',
      'c:\\aaaa\\bbbb': 'c:\\\\aaaa\\\\bbbb',
      'foo\\bar': 'foo\\\\bar',
      'foo\\bar/lol': 'foo\\\\bar/lol',
      'website\\docs/**/*.{md,mdx}': 'website\\\\docs/**/*.{md,mdx}',
    };
    Object.keys(asserts).forEach((file) => {
      expect(escapePath(file)).toBe(asserts[file]);
    });
  });
});

describe('posixPath', () => {
  test('posixPath works', () => {
    const asserts: Record<string, string> = {
      'c:/aaaa\\bbbb': 'c:/aaaa/bbbb',
      'c:\\aaaa\\bbbb\\★': 'c:\\aaaa\\bbbb\\★',
      '\\\\?\\c:\\aaaa\\bbbb': '\\\\?\\c:\\aaaa\\bbbb',
      'c:\\aaaa\\bbbb': 'c:/aaaa/bbbb',
      'foo\\bar': 'foo/bar',
      'foo\\bar/lol': 'foo/bar/lol',
      'website\\docs/**/*.{md,mdx}': 'website/docs/**/*.{md,mdx}',
    };
    Object.keys(asserts).forEach((file) => {
      expect(posixPath(file)).toBe(asserts[file]);
    });
  });
});

describe('aliasedSitePath', () => {
  test('behaves correctly', () => {
    const asserts: Record<string, string> = {
      'user/website/docs/asd.md': '@site/docs/asd.md',
      'user/website/versioned_docs/foo/bar.md':
        '@site/versioned_docs/foo/bar.md',
      'user/docs/test.md': '@site/../docs/test.md',
    };
    Object.keys(asserts).forEach((file) => {
      expect(posixPath(aliasedSitePath(file, 'user/website'))).toBe(
        asserts[file],
      );
    });
  });
});
@@ -1,25 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import {posixPath} from '../posixPath';

describe('posixPath', () => {
  test('posixPath works', () => {
    const asserts: Record<string, string> = {
      'c:/aaaa\\bbbb': 'c:/aaaa/bbbb',
      'c:\\aaaa\\bbbb\\★': 'c:\\aaaa\\bbbb\\★',
      '\\\\?\\c:\\aaaa\\bbbb': '\\\\?\\c:\\aaaa\\bbbb',
      'c:\\aaaa\\bbbb': 'c:/aaaa/bbbb',
      'foo\\bar': 'foo/bar',
      'foo\\bar/lol': 'foo/bar/lol',
      'website\\docs/**/*.{md,mdx}': 'website/docs/**/*.{md,mdx}',
    };
    Object.keys(asserts).forEach((file) => {
      expect(posixPath(file)).toBe(asserts[file]);
    });
  });
});
@@ -5,7 +5,7 @@
 * LICENSE file in the root directory of this source tree.
 */

import {normalizeUrl} from '../normalizeUrl';
import {normalizeUrl} from '../urlUtils';

describe('normalizeUrl', () => {
  test('should normalize urls correctly', () => {
packages/docusaurus-utils/src/dataFileUtils.ts (new file, 93 lines)
@@ -0,0 +1,93 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import fs from 'fs-extra';
import Yaml from 'js-yaml';
import path from 'path';
import {findAsyncSequential} from './index';
import type {ContentPaths} from './markdownLinks';
import logger from '@docusaurus/logger';

type DataFileParams = {
  filePath: string;
  contentPaths: ContentPaths;
};

export async function getDataFilePath({
  filePath,
  contentPaths,
}: DataFileParams): Promise<string | undefined> {
  // Loads a localized data file in priority
  const contentPath = await findFolderContainingFile(
    getContentPathList(contentPaths),
    filePath,
  );
  if (contentPath) {
    return path.join(contentPath, filePath);
  }
  return undefined;
}

/**
 * Looks up for a data file in the content paths, returns the normalized object.
 * Throws when validation fails; returns undefined when file not found
 */
export async function getDataFileData<T>(
  params: DataFileParams & {fileType: string},
  validate: (content: unknown) => T,
): Promise<T | undefined> {
  const filePath = await getDataFilePath(params);
  if (!filePath) {
    return undefined;
  }
  if (await fs.pathExists(filePath)) {
    try {
      const contentString = await fs.readFile(filePath, {encoding: 'utf8'});
      const unsafeContent = Yaml.load(contentString);
      return validate(unsafeContent);
    } catch (e) {
      // TODO replace later by error cause, see https://v8.dev/features/error-cause
      logger.error`The ${params.fileType} file at path=${filePath} looks invalid.`;
      throw e;
    }
  }
  return undefined;
}

// Order matters: we look in priority in localized folder
export function getContentPathList(contentPaths: ContentPaths): string[] {
  return [contentPaths.contentPathLocalized, contentPaths.contentPath];
}

// return the first folder path in which the file exists in
export async function findFolderContainingFile(
  folderPaths: string[],
  relativeFilePath: string,
): Promise<string | undefined> {
  return findAsyncSequential(folderPaths, (folderPath) =>
    fs.pathExists(path.join(folderPath, relativeFilePath)),
  );
}

export async function getFolderContainingFile(
  folderPaths: string[],
  relativeFilePath: string,
): Promise<string> {
  const maybeFolderPath = await findFolderContainingFile(
    folderPaths,
    relativeFilePath,
  );
  // should never happen, as the source was read from the FS anyway...
  if (!maybeFolderPath) {
    throw new Error(
      `File "${relativeFilePath}" does not exist in any of these folders:\n- ${folderPaths.join(
        '\n- ',
      )}]`,
    );
  }
  return maybeFolderPath;
}
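The new getDataFileData pairs path resolution with caller-supplied validation: it parses the YAML/JSON itself and hands the untyped result to a callback, which narrows the type or throws. A minimal sketch of wiring a validator to it, assuming only the signatures above (the data file name and validator are hypothetical):

import {getDataFileData} from '@docusaurus/utils';

type SidebarMeta = {label: string};

// Hypothetical validator: throwing here rejects the whole lookup.
function validateSidebarMeta(content: unknown): SidebarMeta {
  const maybe = content as {label?: unknown};
  if (typeof maybe?.label !== 'string') {
    throw new Error('label is required');
  }
  return {label: maybe.label};
}

async function loadSidebarMeta(contentPath: string, localizedPath: string) {
  return getDataFileData(
    {
      filePath: 'sidebar-meta.yml', // hypothetical data file
      contentPaths: {contentPathLocalized: localizedPath, contentPath},
      fileType: 'sidebar meta',
    },
    validateSidebarMeta,
  );
}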
@@ -1,23 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

/**
 * When you have a path like C:\X\Y
 * It is not safe to use directly when generating code
 * For example, this would fail due to unescaped \: `<img src={require('${filePath}')} />`
 * But this would work: `<img src={require('${escapePath(filePath)}')} />`
 *
 * posixPath can't be used in all cases, because forward slashes are only valid
 * Windows paths when they don't contain non-ascii characters, and posixPath
 * doesn't escape those that fail to be converted.
 */
export function escapePath(str: string): string {
  const escaped = JSON.stringify(str);

  // Remove the " around the json string;
  return escaped.substring(1, escaped.length - 1);
}
@@ -8,8 +8,7 @@
import logger from '@docusaurus/logger';
import path from 'path';
import {createHash} from 'crypto';
import {camelCase, mapValues} from 'lodash';
import escapeStringRegexp from 'escape-string-regexp';
import {mapValues} from 'lodash';
import fs from 'fs-extra';
import {URL} from 'url';
import {
@@ -20,30 +19,21 @@ import {

import resolvePathnameUnsafe from 'resolve-pathname';

import {posixPath as posixPathImport} from './posixPath';
import {simpleHash, docuHash} from './hashUtils';
import {normalizeUrl} from './normalizeUrl';
import {DEFAULT_PLUGIN_ID} from './constants';

export * from './constants';
export * from './mdxUtils';
export * from './normalizeUrl';
export * from './urlUtils';
export * from './tags';

export const posixPath = posixPathImport;

export * from './markdownParser';
export * from './markdownLinks';
export * from './escapePath';
export * from './slugger';
export {md5Hash, simpleHash, docuHash} from './hashUtils';
export {
  Globby,
  GlobExcludeDefault,
  createMatcher,
  createAbsoluteFilePathMatcher,
} from './globUtils';
export * from './pathUtils';
export * from './hashUtils';
export * from './globUtils';
export * from './webpackUtils';
export * from './dataFileUtils';

const fileHash = new Map();
export async function generate(
@@ -80,18 +70,6 @@ export async function generate(
  }
}

export function objectWithKeySorted<T>(
  obj: Record<string, T>,
): Record<string, T> {
  // https://github.com/lodash/lodash/issues/1459#issuecomment-460941233
  return Object.keys(obj)
    .sort()
    .reduce((acc: Record<string, T>, key: string) => {
      acc[key] = obj[key];
      return acc;
    }, {});
}

const indexRE = /(^|.*\/)index\.(md|mdx|js|jsx|ts|tsx)$/i;
const extRE = /\.(md|mdx|js|jsx|ts|tsx)$/;

@@ -113,37 +91,6 @@ export function encodePath(userpath: string): string {
    .join('/');
}

/**
 * Convert first string character to the upper case.
 * E.g: docusaurus -> Docusaurus
 */
export function upperFirst(str: string): string {
  return str ? str.charAt(0).toUpperCase() + str.slice(1) : '';
}

/**
 * Generate unique React Component Name.
 * E.g: /foo-bar -> FooBar096
 */
export function genComponentName(pagePath: string): string {
  if (pagePath === '/') {
    return 'index';
  }
  const pageHash = docuHash(pagePath);
  return upperFirst(camelCase(pageHash));
}

// When you want to display a path in a message/warning/error,
// it's more convenient to:
// - make it relative to cwd()
// - convert to posix (ie not using windows \ path separator)
// This way, Jest tests can run more reliably on any computer/CI
// on both Unix/Windows
// For Windows users this is not perfect (as they see / instead of \) but it's probably good enough
export function toMessageRelativeFilePath(filePath: string): string {
  return posixPath(path.relative(process.cwd(), filePath));
}

const chunkNameCache = new Map();
/**
 * Generate unique chunk name given a module path.
@@ -172,52 +119,6 @@ export function genChunkName(
  return chunkName;
}

// Too dynamic
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types, @typescript-eslint/no-explicit-any
export function idx(target: any, keyPaths?: string | (string | number)[]): any {
  return (
    target &&
    keyPaths &&
    (Array.isArray(keyPaths)
      ? keyPaths.reduce((obj, key) => obj && obj[key], target)
      : target[keyPaths])
  );
}

/**
 * Given a filepath and dirpath, get the first directory.
 */
export function getSubFolder(file: string, refDir: string): string | null {
  const separator = escapeStringRegexp(path.sep);
  const baseDir = escapeStringRegexp(path.basename(refDir));
  const regexSubFolder = new RegExp(
    `${baseDir}${separator}(.*?)${separator}.*`,
  );
  const match = regexSubFolder.exec(file);
  return match && match[1];
}

/**
 * Alias filepath relative to site directory, very useful so that we
 * don't expose user's site structure.
 * Example: some/path/to/website/docs/foo.md -> @site/docs/foo.md
 */
export function aliasedSitePath(filePath: string, siteDir: string): string {
  const relativePath = posixPath(path.relative(siteDir, filePath));
  // Cannot use path.join() as it resolves '../' and removes
  // the '@site'. Let webpack loader resolve it.
  return `@site/${relativePath}`;
}

export function getEditUrl(
  fileRelativePath: string,
  editUrl?: string,
): string | undefined {
  return editUrl
    ? normalizeUrl([editUrl, posixPath(fileRelativePath)])
    : undefined;
}

export function isValidPathname(str: string): boolean {
  if (!str.startsWith('/')) {
    return false;
@@ -306,7 +207,7 @@ export function getPluginI18nPath({
  );
}

export async function mapAsyncSequencial<T, R>(
export async function mapAsyncSequential<T, R>(
  array: T[],
  action: (t: T) => Promise<R>,
): Promise<R[]> {
@@ -332,35 +233,6 @@ export async function findAsyncSequential<T>(
  return undefined;
}

// return the first folder path in which the file exists in
export async function findFolderContainingFile(
  folderPaths: string[],
  relativeFilePath: string,
): Promise<string | undefined> {
  return findAsyncSequential(folderPaths, (folderPath) =>
    fs.pathExists(path.join(folderPath, relativeFilePath)),
  );
}

export async function getFolderContainingFile(
  folderPaths: string[],
  relativeFilePath: string,
): Promise<string> {
  const maybeFolderPath = await findFolderContainingFile(
    folderPaths,
    relativeFilePath,
  );
  // should never happen, as the source was read from the FS anyway...
  if (!maybeFolderPath) {
    throw new Error(
      `File "${relativeFilePath}" does not exist in any of these folders:\n- ${folderPaths.join(
        '\n- ',
      )}]`,
    );
  }
  return maybeFolderPath;
}

export function reportMessage(
  message: string,
  reportingSeverity: ReportingSeverity,
@@ -420,21 +292,3 @@ export function updateTranslationFileMessages(
    })),
  };
}

// Input: ## Some heading {#some-heading}
// Output: {text: "## Some heading", id: "some-heading"}
export function parseMarkdownHeadingId(heading: string): {
  text: string;
  id?: string;
} {
  const customHeadingIdRegex = /^(.*?)\s*\{#([\w-]+)\}$/;
  const matches = customHeadingIdRegex.exec(heading);
  if (matches) {
    return {
      text: matches[1],
      id: matches[2],
    };
  } else {
    return {text: heading, id: undefined};
  }
}
@@ -6,7 +6,7 @@
 */

import path from 'path';
import {aliasedSitePath} from './index';
import {aliasedSitePath} from './pathUtils';

export type ContentPaths = {
  contentPath: string;
@@ -9,6 +9,24 @@ import logger from '@docusaurus/logger';
import fs from 'fs-extra';
import matter from 'gray-matter';

// Input: ## Some heading {#some-heading}
// Output: {text: "## Some heading", id: "some-heading"}
export function parseMarkdownHeadingId(heading: string): {
  text: string;
  id?: string;
} {
  const customHeadingIdRegex = /^(.*?)\s*\{#([\w-]+)\}$/;
  const matches = customHeadingIdRegex.exec(heading);
  if (matches) {
    return {
      text: matches[1],
      id: matches[2],
    };
  } else {
    return {text: heading, id: undefined};
  }
}

// Hacky way of stripping out import statements from the excerpt
// TODO: Find a better way to do so, possibly by compiling the Markdown content,
// stripping out HTML tags and obtaining the first line.
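parseMarkdownHeadingId moves here unchanged; per its own Input/Output comment, it splits an explicit {#id} marker off a heading line:

parseMarkdownHeadingId('## Some heading {#some-heading}');
// => {text: '## Some heading', id: 'some-heading'}

parseMarkdownHeadingId('## Some heading');
// => {text: '## Some heading', id: undefined}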
@@ -7,6 +7,8 @@

// Based on https://github.com/gatsbyjs/gatsby/pull/21518/files

import path from 'path';

// MacOS (APFS) and Windows (NTFS) filename length limit = 255 chars, Others = 255 bytes
const MAX_PATH_SEGMENT_CHARS = 255;
const MAX_PATH_SEGMENT_BYTES = 255;
@@ -39,3 +41,66 @@ export const shortName = (str: string): string => {
    )
    .toString();
};

/**
 * Convert Windows backslash paths to posix style paths.
 * E.g: endi\lie -> endi/lie
 *
 * Returns original path if the posix counterpart is not valid Windows path.
 * This makes the legacy code that uses posixPath safe; but also makes it less
 * useful when you actually want a path with forward slashes (e.g. for URL)
 *
 * Adopted from https://github.com/sindresorhus/slash/blob/main/index.js
 */
export function posixPath(str: string): string {
  const isExtendedLengthPath = /^\\\\\?\\/.test(str);

  // Forward slashes are only valid Windows paths when they don't contain non-ascii characters.
  // eslint-disable-next-line no-control-regex
  const hasNonAscii = /[^\u0000-\u0080]+/.test(str);

  if (isExtendedLengthPath || hasNonAscii) {
    return str;
  }
  return str.replace(/\\/g, '/');
}

// When you want to display a path in a message/warning/error,
// it's more convenient to:
// - make it relative to cwd()
// - convert to posix (ie not using windows \ path separator)
// This way, Jest tests can run more reliably on any computer/CI
// on both Unix/Windows
// For Windows users this is not perfect (as they see / instead of \) but it's probably good enough
export function toMessageRelativeFilePath(filePath: string): string {
  return posixPath(path.relative(process.cwd(), filePath));
}

/**
 * Alias filepath relative to site directory, very useful so that we
 * don't expose user's site structure.
 * Example: some/path/to/website/docs/foo.md -> @site/docs/foo.md
 */
export function aliasedSitePath(filePath: string, siteDir: string): string {
  const relativePath = posixPath(path.relative(siteDir, filePath));
  // Cannot use path.join() as it resolves '../' and removes
  // the '@site'. Let webpack loader resolve it.
  return `@site/${relativePath}`;
}

/**
 * When you have a path like C:\X\Y
 * It is not safe to use directly when generating code
 * For example, this would fail due to unescaped \: `<img src={require('${filePath}')} />`
 * But this would work: `<img src={require('${escapePath(filePath)}')} />`
 *
 * posixPath can't be used in all cases, because forward slashes are only valid
 * Windows paths when they don't contain non-ascii characters, and posixPath
 * doesn't escape those that fail to be converted.
 */
export function escapePath(str: string): string {
  const escaped = JSON.stringify(str);

  // Remove the " around the json string;
  return escaped.substring(1, escaped.length - 1);
}
@@ -1,29 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

/**
 * Convert Windows backslash paths to posix style paths.
 * E.g: endi\lie -> endi/lie
 *
 * Returns original path if the posix counterpart is not valid Windows path.
 * This makes the legacy code that uses posixPath safe; but also makes it less
 * useful when you actually want a path with forward slashes (e.g. for URL)
 *
 * Adopted from https://github.com/sindresorhus/slash/blob/main/index.js
 */
export function posixPath(str: string): string {
  const isExtendedLengthPath = /^\\\\\?\\/.test(str);

  // Forward slashes are only valid Windows paths when they don't contain non-ascii characters.
  // eslint-disable-next-line no-control-regex
  const hasNonAscii = /[^\u0000-\u0080]+/.test(str);

  if (isExtendedLengthPath || hasNonAscii) {
    return str;
  }
  return str.replace(/\\/g, '/');
}
@@ -6,7 +6,7 @@
 */

import {kebabCase, uniq, uniqBy} from 'lodash';
import {normalizeUrl} from './normalizeUrl';
import {normalizeUrl} from './urlUtils';

export type Tag = {
  label: string;
@@ -78,3 +78,13 @@ export function normalizeUrl(rawUrls: string[]): string {

  return str;
}

export function getEditUrl(
  fileRelativePath: string,
  editUrl?: string,
): string | undefined {
  return editUrl
    ? // Don't use posixPath for this: we need to force a forward slash path
      normalizeUrl([editUrl, fileRelativePath.replace(/\\/g, '/')])
    : undefined;
}
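Note the inline comment in the moved getEditUrl: instead of posixPath (which leaves extended-length and non-ASCII Windows paths untouched), it force-replaces every backslash so the result is always a forward-slash URL segment. A sketch of the expected behavior (the edit URL below is hypothetical):

getEditUrl('docs\\intro.md', 'https://example.com/edit');
// => 'https://example.com/edit/docs/intro.md'

getEditUrl('docs/intro.md');
// => undefined (no editUrl configured)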
@@ -7,7 +7,7 @@

import type {RuleSetRule} from 'webpack';
import path from 'path';
import {escapePath} from './escapePath';
import {escapePath} from './pathUtils';
import {
  WEBPACK_URL_LOADER_LIMIT,
  OUTPUT_STATIC_ASSETS_DIR_NAME,
@@ -26,7 +26,7 @@ import {
} from '../webpack/utils';
import CleanWebpackPlugin from '../webpack/plugins/CleanWebpackPlugin';
import {loadI18n} from '../server/i18n';
import {mapAsyncSequencial} from '@docusaurus/utils';
import {mapAsyncSequential} from '@docusaurus/utils';

export default async function build(
  siteDir: string,
@@ -82,7 +82,7 @@ export default async function build(
    ...i18n.locales.filter((locale) => locale !== i18n.defaultLocale),
  ];

  const results = await mapAsyncSequencial(orderedLocales, (locale) => {
  const results = await mapAsyncSequential(orderedLocales, (locale) => {
    const isLastLocale =
      orderedLocales.indexOf(locale) === orderedLocales.length - 1;
    return tryToBuildLocale({locale, isLastLocale});
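The rename fixes the mapAsyncSequencial misspelling; the behavior stays the same, so locales are still built strictly one after another. A minimal sketch of the sequential-mapping semantics (not necessarily the exact implementation):

async function mapAsyncSequentialSketch<T, R>(
  array: T[],
  action: (t: T) => Promise<R>,
): Promise<R[]> {
  const results: R[] = [];
  for (const item of array) {
    // Unlike Promise.all(array.map(action)), the next action only starts
    // once the previous one has resolved.
    results.push(await action(item));
  }
  return results;
}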
@@ -176,7 +176,7 @@ const config = {
          max: 1030, // max resized image's size.
          min: 640, // min resized image's size. if original is lower, use that size.
          steps: 2, // the max number of images generated between min and max (inclusive)
          disableInDev: false,
          // disableInDev: false,
        },
      ],
      [
@@ -15426,10 +15426,10 @@ prepend-http@^2.0.0:
  resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897"
  integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=

prettier@^2.5.0:
  version "2.5.0"
  resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.5.0.tgz#a6370e2d4594e093270419d9cc47f7670488f893"
  integrity sha512-FM/zAKgWTxj40rH03VxzIPdXmj39SwSjwG0heUcNFwI+EMZJnY93yAiKXM3dObIKAM5TA88werc8T/EwhB45eg==
prettier@^2.5.1:
  version "2.5.1"
  resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.5.1.tgz#fff75fa9d519c54cf0fce328c1017d94546bc56a"
  integrity sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg==

pretty-bytes@^5.3.0:
  version "5.6.0"