Mirror of https://github.com/facebook/docusaurus.git, synced 2025-05-10 15:47:23 +02:00

feat(utils): JSDoc for all APIs (#6980)

* feat(utils): JSDoc for all APIs
* fix tests

parent b8d2a4e84d
commit 2eeb0e46a2

31 changed files with 637 additions and 255 deletions
@@ -5,10 +5,7 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-import type {
-  BrokenMarkdownLink,
-  ContentPaths,
-} from '@docusaurus/utils/lib/markdownLinks';
+import type {BrokenMarkdownLink, ContentPaths} from '@docusaurus/utils';
 import type {BlogPostMetadata} from '@docusaurus/plugin-content-blog';
 import type {Metadata as BlogPaginatedMetadata} from '@theme/BlogListPage';
 
@@ -13,7 +13,7 @@ import type {
   DocsMarkdownOption,
   SourceToPermalink,
   VersionMetadata,
-  BrokenMarkdownLink,
+  DocBrokenMarkdownLink,
 } from '../../types';
 import {VERSIONED_DOCS_DIR, CURRENT_VERSION_NAME} from '../../constants';
 
@@ -156,22 +156,22 @@ describe('linkify', () => {
       filePath: doc5,
       link: 'docNotExist1.md',
       contentPaths: versionCurrent,
-    } as BrokenMarkdownLink);
+    } as DocBrokenMarkdownLink);
     expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(2, {
       filePath: doc5,
       link: './docNotExist2.mdx',
       contentPaths: versionCurrent,
-    } as BrokenMarkdownLink);
+    } as DocBrokenMarkdownLink);
     expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(3, {
       filePath: doc5,
       link: '../docNotExist3.mdx',
       contentPaths: versionCurrent,
-    } as BrokenMarkdownLink);
+    } as DocBrokenMarkdownLink);
     expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(4, {
       filePath: doc5,
       link: './subdir/docNotExist4.md',
       contentPaths: versionCurrent,
-    } as BrokenMarkdownLink);
+    } as DocBrokenMarkdownLink);
   });
 
   it('transforms absolute links in versioned docs', async () => {
@@ -20,7 +20,7 @@ import type {
 } from './types';
 
 import _ from 'lodash';
-import {getElementsAround, toMessageRelativeFilePath} from '@docusaurus/utils';
+import {toMessageRelativeFilePath} from '@docusaurus/utils';
 import type {DocMetadataBase, DocNavLink} from '../types';
 
 export function isCategoriesShorthand(
@@ -225,11 +225,11 @@ export function createSidebarsUtils(sidebars: Sidebars): SidebarsUtils {
       return {sidebarName, next: undefined, previous: undefined};
     }
 
-    const {previous, next} = getElementsAround(
-      navigationItems,
-      currentItemIndex,
-    );
-    return {sidebarName, previous, next};
+    return {
+      sidebarName,
+      previous: navigationItems[currentItemIndex - 1],
+      next: navigationItems[currentItemIndex + 1],
+    };
   }
 
   function getCategoryGeneratedIndexList(): SidebarItemCategoryWithGeneratedIndex[] {
@@ -268,11 +268,11 @@ export function createSidebarsUtils(sidebars: Sidebars): SidebarsUtils {
     const currentItemIndex = navigationItems.findIndex(
       isCurrentCategoryGeneratedIndexItem,
     );
-    const {previous, next} = getElementsAround(
-      navigationItems,
-      currentItemIndex,
-    );
-    return {sidebarName, previous, next};
+    return {
+      sidebarName,
+      previous: navigationItems[currentItemIndex - 1],
+      next: navigationItems[currentItemIndex + 1],
+    };
   }
 
   function checkSidebarsDocIds(validDocIds: string[], sidebarFilePath: string) {
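Both hunks above replace `getElementsAround` with direct neighbor indexing. This works because an out-of-range array read in JavaScript evaluates to `undefined`, which is exactly the `previous`/`next: T | undefined` contract the sidebar navigation expects. A minimal sketch (sample data is illustrative):

```ts
const navigationItems = ['doc-a', 'doc-b', 'doc-c'];
const currentItemIndex = 0;

// Out-of-range reads yield undefined instead of throwing, so no
// explicit bounds check is needed at either edge of the array.
const previous = navigationItems[currentItemIndex - 1]; // undefined
const next = navigationItems[currentItemIndex + 1]; // 'doc-b'
```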
@@ -8,11 +8,12 @@
 /// <reference types="@docusaurus/module-type-aliases" />
 
 import type {Sidebars} from './sidebars/types';
-import type {Tag, FrontMatterTag} from '@docusaurus/utils';
 import type {
-  BrokenMarkdownLink as IBrokenMarkdownLink,
+  Tag,
+  FrontMatterTag,
+  BrokenMarkdownLink,
   ContentPaths,
-} from '@docusaurus/utils/lib/markdownLinks';
+} from '@docusaurus/utils';
 import type {VersionBanner} from '@docusaurus/plugin-content-docs';
 
 export type DocFile = {
@@ -133,11 +134,11 @@ export type LoadedContent = {
   loadedVersions: LoadedVersion[];
 };
 
-export type BrokenMarkdownLink = IBrokenMarkdownLink<VersionMetadata>;
+export type DocBrokenMarkdownLink = BrokenMarkdownLink<VersionMetadata>;
 
 export type DocsMarkdownOption = {
   versionsMetadata: VersionMetadata[];
   siteDir: string;
   sourceToPermalink: SourceToPermalink;
-  onBrokenMarkdownLink: (brokenMarkdownLink: BrokenMarkdownLink) => void;
+  onBrokenMarkdownLink: (brokenMarkdownLink: DocBrokenMarkdownLink) => void;
 };
 
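The `DocBrokenMarkdownLink` alias above specializes the generic `BrokenMarkdownLink<T extends ContentPaths>` now exported from `@docusaurus/utils`. A hedged sketch of the relationship (the `versionName` field is an illustrative stand-in; the real `VersionMetadata` carries more fields):

```ts
import type {BrokenMarkdownLink, ContentPaths} from '@docusaurus/utils';

// Illustrative stand-in: the real VersionMetadata extends ContentPaths
// with version-specific metadata.
type VersionMetadata = ContentPaths & {versionName: string};

// Each content plugin can specialize the generic link report type,
// so reporters receive plugin-specific context.
type DocBrokenMarkdownLink = BrokenMarkdownLink<VersionMetadata>;
```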
@@ -898,7 +898,8 @@ declare module '@theme/TagsListByLetter' {
 }
 
 declare module '@theme/TagsListInline' {
-  export type Tag = Readonly<{label: string; permalink: string}>;
+  import type {Tag} from '@docusaurus/utils';
+
   export interface Props {
     readonly tags: readonly Tag[];
   }
@@ -20,6 +20,7 @@
   "dependencies": {
     "@docusaurus/logger": "2.0.0-beta.17",
     "@docusaurus/utils": "2.0.0-beta.17",
+    "js-yaml": "^4.1.0",
     "joi": "^17.6.0",
     "tslib": "^2.3.1"
   },
@@ -18,11 +18,13 @@ const JoiFrontMatterString: Joi.Extension = {
     return {value};
   },
 };
 
 /**
- * Enhance the default Joi.string() type so that it can convert number to
+ * Enhance the default `Joi.string()` type so that it can convert number to
  * strings. If user use front matter "tag: 2021", we shouldn't need to ask her
  * to write "tag: '2021'". Also yaml tries to convert patterns like "2019-01-01"
  * to dates automatically.
+ *
  * @see https://github.com/facebook/docusaurus/issues/4642
  * @see https://github.com/sideway/joi/issues/1442#issuecomment-823997884
  */
@@ -7,8 +7,10 @@
 
 import type Joi from './Joi';
 import logger from '@docusaurus/logger';
+import Yaml from 'js-yaml';
 import {PluginIdSchema} from './validationSchemas';
 
+/** Print warnings returned from Joi validation. */
 export function printWarning(warning?: Joi.ValidationError): void {
   if (warning) {
     const warningMessages = warning.details
@@ -18,9 +20,14 @@ export function printWarning(warning?: Joi.ValidationError): void {
   }
 }
 
+/**
+ * The callback that should be used to validate plugin options. Handles plugin
+ * IDs on a generic level: no matter what the schema declares, this callback
+ * would require a string ID or default to "default".
+ */
 export function normalizePluginOptions<T extends {id?: string}>(
   schema: Joi.ObjectSchema<T>,
-  // This allows us to automatically normalize undefined to {id: 'default'}
+  // This allows us to automatically normalize undefined to { id: "default" }
   options: Partial<T> = {},
 ): T {
   // All plugins can be provided an "id" option (multi-instance support)
@@ -41,6 +48,10 @@ export function normalizePluginOptions<T extends {id?: string}>(
   return value;
 }
 
+/**
+ * The callback that should be used to validate theme config. No matter what the
+ * schema declares, this callback would allow unknown attributes.
+ */
 export function normalizeThemeConfig<T>(
   schema: Joi.ObjectSchema<T>,
   themeConfig: Partial<T>,
@@ -62,6 +73,9 @@ export function normalizeThemeConfig<T>(
   return value;
 }
 
+/**
+ * Validate front matter with better error message
+ */
 export function validateFrontMatter<T>(
   frontMatter: Record<string, unknown>,
   schema: Joi.ObjectSchema<T>,
@@ -75,13 +89,13 @@ export function validateFrontMatter<T>(
   printWarning(warning);
 
   if (error) {
-    const frontMatterString = JSON.stringify(frontMatter, null, 2);
     const errorDetails = error.details;
     const invalidFields = errorDetails.map(({path}) => path).join(', ');
 
     logger.error`The following front matter:
-${logger.yellow(frontMatterString)}
-contains invalid values for field(s): ${logger.yellow(invalidFields)}.
+---
+${Yaml.dump(frontMatter)}---
+contains invalid values for field(s): code=${invalidFields}.
 ${errorDetails.map(({message}) => message)}
 `;
     throw error;
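Swapping `JSON.stringify` for `Yaml.dump` makes the error echo the front matter back in the same YAML syntax the user wrote, between `---` markers. A small comparison using the real js-yaml API (sample data invented):

```ts
import Yaml from 'js-yaml';

const frontMatter = {tags: ['tag1', 'tag2'], draft: 'maybe'};

// Before: JSON syntax the user never typed.
JSON.stringify(frontMatter, null, 2);
// '{\n  "tags": [\n    "tag1",\n    "tag2"\n  ],\n  "draft": "maybe"\n}'

// After: YAML, matching the document's own front matter block.
Yaml.dump(frontMatter);
// 'tags:\n  - tag1\n  - tag2\ndraft: maybe\n'
```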
@@ -103,7 +103,7 @@ describe('createAbsoluteFilePathMatcher', () => {
     expect(() =>
       matcher('/bad/path/myDoc.md'),
     ).toThrowErrorMatchingInlineSnapshot(
-      `"createAbsoluteFilePathMatcher unexpected error, absoluteFilePath=/bad/path/myDoc.md was not contained in any of the root folders [\\"/_root/docs\\",\\"/root/_docs/\\",\\"/__test__/website/src\\"]"`,
+      `"createAbsoluteFilePathMatcher unexpected error, absoluteFilePath=/bad/path/myDoc.md was not contained in any of the root folders: /_root/docs, /root/_docs/, /__test__/website/src"`,
     );
   });
 });
@@ -9,7 +9,6 @@ import {jest} from '@jest/globals';
 import {
   removeSuffix,
   removePrefix,
-  getElementsAround,
   mapAsyncSequential,
   findAsyncSequential,
   reportMessage,
@@ -38,40 +37,6 @@ describe('removePrefix', () => {
   });
 });
 
-describe('getElementsAround', () => {
-  it('returns elements around', () => {
-    expect(getElementsAround(['a', 'b', 'c', 'd'], 0)).toEqual({
-      previous: undefined,
-      next: 'b',
-    });
-    expect(getElementsAround(['a', 'b', 'c', 'd'], 1)).toEqual({
-      previous: 'a',
-      next: 'c',
-    });
-    expect(getElementsAround(['a', 'b', 'c', 'd'], 2)).toEqual({
-      previous: 'b',
-      next: 'd',
-    });
-    expect(getElementsAround(['a', 'b', 'c', 'd'], 3)).toEqual({
-      previous: 'c',
-      next: undefined,
-    });
-  });
-
-  it('throws if bad index is provided', () => {
-    expect(() =>
-      getElementsAround(['a', 'b', 'c', 'd'], -1),
-    ).toThrowErrorMatchingInlineSnapshot(
-      `"Valid \\"aroundIndex\\" for array (of size 4) are between 0 and 3, but you provided -1."`,
-    );
-    expect(() =>
-      getElementsAround(['a', 'b', 'c', 'd'], 4),
-    ).toThrowErrorMatchingInlineSnapshot(
-      `"Valid \\"aroundIndex\\" for array (of size 4) are between 0 and 3, but you provided 4."`,
-    );
-  });
-});
-
 describe('mapAsyncSequential', () => {
   function sleep(timeout: number): Promise<void> {
     return new Promise((resolve) => {
@@ -113,9 +113,13 @@ describe('createExcerpt', () => {
         import Component from '@site/src/components/Component'
         import './styles.css';
 
-        export function ItemCol(props) { return <Item {...props} className={'col col--6 margin-bottom--lg'}/> }
+        export function ItemCol(props) {
+          return <Item {...props} className={'col col--6 margin-bottom--lg'}/>
+        }
 
-        export function ItemCol(props) { return <Item {...props} className={'col col--6 margin-bottom--lg'}/> };
+        export function ItemCol(props) {
+          return <Item {...props} className={'col col--6 margin-bottom--lg'}/>
+        };
 
         Lorem **ipsum** dolor sit \`amet\`[^1], consectetur _adipiscing_ elit. [**Vestibulum**](https://wiktionary.org/wiki/vestibulum) ex urna[^note], ~~molestie~~ et sagittis ut, varius ac justo :wink:.
 
@@ -146,6 +150,18 @@ describe('createExcerpt', () => {
       `),
     ).toBe('Lorem ipsum dolor sit amet, consectetur adipiscing elit.');
   });
+
+  it('creates excerpt after multi-line imports', () => {
+    expect(
+      createExcerpt(dedent`
+        import React, {
+          type ReactNode,
+        } from 'react';
+
+        Lorem \`ipsum\` dolor sit amet, consectetur \`adipiscing elit\`.
+      `),
+    ).toBe('Lorem ipsum dolor sit amet, consectetur adipiscing elit.');
+  });
 });
 
 describe('parseMarkdownContentTitle', () => {
@@ -5,62 +5,60 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-import {
-  normalizeFrontMatterTag,
-  normalizeFrontMatterTags,
-  groupTaggedItems,
-  type Tag,
-} from '../tags';
+import {normalizeFrontMatterTags, groupTaggedItems, type Tag} from '../tags';
 
-describe('normalizeFrontMatterTag', () => {
-  type Input = Parameters<typeof normalizeFrontMatterTag>[1];
-  type Output = ReturnType<typeof normalizeFrontMatterTag>;
-
+describe('normalizeFrontMatterTags', () => {
   it('normalizes simple string tag', () => {
     const tagsPath = '/all/tags';
-    const input: Input = 'tag';
-    const expectedOutput: Output = {
+    const input = 'tag';
+    const expectedOutput = {
       label: 'tag',
       permalink: `${tagsPath}/tag`,
     };
-    expect(normalizeFrontMatterTag(tagsPath, input)).toEqual(expectedOutput);
+    expect(normalizeFrontMatterTags(tagsPath, [input])).toEqual([
+      expectedOutput,
+    ]);
   });
 
   it('normalizes complex string tag', () => {
     const tagsPath = '/all/tags';
-    const input: Input = 'some more Complex_tag';
-    const expectedOutput: Output = {
+    const input = 'some more Complex_tag';
+    const expectedOutput = {
       label: 'some more Complex_tag',
       permalink: `${tagsPath}/some-more-complex-tag`,
     };
-    expect(normalizeFrontMatterTag(tagsPath, input)).toEqual(expectedOutput);
+    expect(normalizeFrontMatterTags(tagsPath, [input])).toEqual([
+      expectedOutput,
+    ]);
   });
 
   it('normalizes simple object tag', () => {
     const tagsPath = '/all/tags';
-    const input: Input = {label: 'tag', permalink: 'tagPermalink'};
-    const expectedOutput: Output = {
+    const input = {label: 'tag', permalink: 'tagPermalink'};
+    const expectedOutput = {
       label: 'tag',
       permalink: `${tagsPath}/tagPermalink`,
     };
-    expect(normalizeFrontMatterTag(tagsPath, input)).toEqual(expectedOutput);
+    expect(normalizeFrontMatterTags(tagsPath, [input])).toEqual([
+      expectedOutput,
+    ]);
   });
 
   it('normalizes complex string tag with object tag', () => {
     const tagsPath = '/all/tags';
-    const input: Input = {
+    const input = {
       label: 'tag complex Label',
       permalink: '/MoreComplex/Permalink',
     };
-    const expectedOutput: Output = {
+    const expectedOutput = {
       label: 'tag complex Label',
       permalink: `${tagsPath}/MoreComplex/Permalink`,
     };
-    expect(normalizeFrontMatterTag(tagsPath, input)).toEqual(expectedOutput);
-  });
+    expect(normalizeFrontMatterTags(tagsPath, [input])).toEqual([
+      expectedOutput,
+    ]);
+  });
 
-describe('normalizeFrontMatterTags', () => {
   type Input = Parameters<typeof normalizeFrontMatterTags>[1];
   type Output = ReturnType<typeof normalizeFrontMatterTags>;
 
@@ -5,34 +5,86 @@
  * LICENSE file in the root directory of this source tree.
  */
 
+/** Node major version, directly read from env. */
 export const NODE_MAJOR_VERSION = parseInt(
   process.versions.node.split('.')[0]!,
   10,
 );
+/** Node minor version, directly read from env. */
 export const NODE_MINOR_VERSION = parseInt(
   process.versions.node.split('.')[1]!,
   10,
 );
 
-// Can be overridden with cli option --out-dir
+/**
+ * Can be overridden with cli option `--out-dir`. Code should generally use
+ * `context.outDir` instead (which is always absolute and localized).
+ */
 export const DEFAULT_BUILD_DIR_NAME = 'build';
 
-// Can be overridden with cli option --config
+/**
+ * Can be overridden with cli option `--config`. Code should generally use
+ * `context.siteConfigPath` instead (which is always absolute).
+ */
 export const DEFAULT_CONFIG_FILE_NAME = 'docusaurus.config.js';
 
+/** Can be absolute or relative to site directory. */
 export const BABEL_CONFIG_FILE_NAME =
-  process.env.DOCUSAURUS_BABEL_CONFIG_FILE_NAME || 'babel.config.js';
+  process.env.DOCUSAURUS_BABEL_CONFIG_FILE_NAME ?? 'babel.config.js';
 
+/**
+ * Can be absolute or relative to site directory. Code should generally use
+ * `context.generatedFilesDir` instead (which is always absolute).
+ */
 export const GENERATED_FILES_DIR_NAME =
-  process.env.DOCUSAURUS_GENERATED_FILES_DIR_NAME || '.docusaurus';
+  process.env.DOCUSAURUS_GENERATED_FILES_DIR_NAME ?? '.docusaurus';
 
+/**
+ * We would assume all of the site's JS code lives in here and not outside.
+ * Relative to the site directory.
+ */
 export const SRC_DIR_NAME = 'src';
-export const STATIC_DIR_NAME = 'static';
-export const OUTPUT_STATIC_ASSETS_DIR_NAME = 'assets'; // files handled by webpack, hashed (can be cached aggressively)
+
+/**
+ * Can be overridden with `config.staticDirectories`. Code should use
+ * `context.siteConfig.staticDirectories` instead (which is always absolute).
+ */
+export const DEFAULT_STATIC_DIR_NAME = 'static';
+
+/**
+ * Files here are handled by webpack, hashed (can be cached aggressively).
+ * Relative to the build output folder.
+ */
+export const OUTPUT_STATIC_ASSETS_DIR_NAME = 'assets';
+
+/**
+ * Components in this directory will receive the `@theme` alias and be able to
+ * shadow default theme components.
+ */
 export const THEME_PATH = `${SRC_DIR_NAME}/theme`;
 
+/**
+ * All translation-related data live here, relative to site directory. Content
+ * will be namespaced by locale.
+ */
+export const I18N_DIR_NAME = 'i18n';
+
+/**
+ * Translations for React code.
+ */
+export const CODE_TRANSLATIONS_FILE_NAME = 'code.json';
+
+/** Dev server opens on this port by default. */
 export const DEFAULT_PORT = 3000;
+
+/** Default plugin ID. */
 export const DEFAULT_PLUGIN_ID = 'default';
 
-// Temporary fix for https://github.com/facebook/docusaurus/issues/5493
+/**
+ * Allow overriding the limit after which the url loader will no longer inline
+ * assets.
+ *
+ * @see https://github.com/facebook/docusaurus/issues/5493
+ */
 export const WEBPACK_URL_LOADER_LIMIT =
   process.env.WEBPACK_URL_LOADER_LIMIT ?? 10000;
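Note the `||` → `??` change on the env-var fallbacks above: nullish coalescing only falls back when the variable is `undefined` (or `null`), so an explicitly set but empty value is no longer silently replaced by the default:

```ts
process.env.DOCUSAURUS_GENERATED_FILES_DIR_NAME = '';

// '' is falsy, so || substitutes the default:
process.env.DOCUSAURUS_GENERATED_FILES_DIR_NAME || '.docusaurus'; // '.docusaurus'

// '' is not nullish, so ?? keeps the user's value:
process.env.DOCUSAURUS_GENERATED_FILES_DIR_NAME ?? '.docusaurus'; // ''
```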
@@ -13,15 +13,25 @@ import type {ContentPaths} from './markdownLinks';
 import logger from '@docusaurus/logger';
 
 type DataFileParams = {
+  /** Path to the potential data file, relative to `contentPaths` */
   filePath: string;
+  /**
+   * Includes the base path and localized path, both of which are eligible for
+   * sourcing data files. Both paths should be absolute.
+   */
   contentPaths: ContentPaths;
 };
 
+/**
+ * Looks for a data file in the potential content paths; loads a localized data
+ * file in priority.
+ *
+ * @returns An absolute path to the data file, or `undefined` if there isn't one.
+ */
 export async function getDataFilePath({
   filePath,
   contentPaths,
 }: DataFileParams): Promise<string | undefined> {
-  // Loads a localized data file in priority
   const contentPath = await findFolderContainingFile(
     getContentPathList(contentPaths),
     filePath,
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Looks up for a data file in the content paths, returns the normalized object.
|
* Looks up for a data file in the content paths, returns the object validated
|
||||||
* Throws when validation fails; returns undefined when file not found
|
* and normalized according to the `validate` callback.
|
||||||
|
*
|
||||||
|
* @returns `undefined` when file not found
|
||||||
|
* @throws Throws when validation fails, displaying a helpful context message.
|
||||||
*/
|
*/
|
||||||
export async function getDataFileData<T>(
|
export async function getDataFileData<T>(
|
||||||
params: DataFileParams & {fileType: string},
|
params: DataFileParams & {
|
||||||
|
/** Used for the "The X file looks invalid" message. */
|
||||||
|
fileType: string;
|
||||||
|
},
|
||||||
validate: (content: unknown) => T,
|
validate: (content: unknown) => T,
|
||||||
): Promise<T | undefined> {
|
): Promise<T | undefined> {
|
||||||
const filePath = await getDataFilePath(params);
|
const filePath = await getDataFilePath(params);
|
||||||
|
@ -54,12 +70,21 @@ export async function getDataFileData<T>(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Order matters: we look in priority in localized folder
|
/**
|
||||||
|
* Takes the `contentPaths` data structure and returns an ordered path list
|
||||||
|
* indicating their priorities. For all data, we look in the localized folder
|
||||||
|
* in priority.
|
||||||
|
*/
|
||||||
export function getContentPathList(contentPaths: ContentPaths): string[] {
|
export function getContentPathList(contentPaths: ContentPaths): string[] {
|
||||||
return [contentPaths.contentPathLocalized, contentPaths.contentPath];
|
return [contentPaths.contentPathLocalized, contentPaths.contentPath];
|
||||||
}
|
}
|
||||||
|
|
||||||
// return the first folder path in which the file exists in
|
/**
|
||||||
|
* @param folderPaths a list of absolute paths.
|
||||||
|
* @param relativeFilePath file path relative to each `folderPaths`.
|
||||||
|
* @returns the first folder path in which the file exists, or `undefined` if
|
||||||
|
* none is found.
|
||||||
|
*/
|
||||||
export async function findFolderContainingFile(
|
export async function findFolderContainingFile(
|
||||||
folderPaths: string[],
|
folderPaths: string[],
|
||||||
relativeFilePath: string,
|
relativeFilePath: string,
|
||||||
|
@ -69,6 +94,16 @@ export async function findFolderContainingFile(
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fail-fast alternative to `findFolderContainingFile`.
|
||||||
|
*
|
||||||
|
* @param folderPaths a list of absolute paths.
|
||||||
|
* @param relativeFilePath file path relative to each `folderPaths`.
|
||||||
|
* @returns the first folder path in which the file exists.
|
||||||
|
* @throws Throws if no file can be found. You should use this method only when
|
||||||
|
* you actually know the file exists (e.g. when the `relativeFilePath` is read
|
||||||
|
* with a glob and you are just trying to localize it)
|
||||||
|
*/
|
||||||
export async function getFolderContainingFile(
|
export async function getFolderContainingFile(
|
||||||
folderPaths: string[],
|
folderPaths: string[],
|
||||||
relativeFilePath: string,
|
relativeFilePath: string,
|
||||||
|
@ -77,12 +112,10 @@ export async function getFolderContainingFile(
|
||||||
folderPaths,
|
folderPaths,
|
||||||
relativeFilePath,
|
relativeFilePath,
|
||||||
);
|
);
|
||||||
// should never happen, as the source was read from the FS anyway...
|
|
||||||
if (!maybeFolderPath) {
|
if (!maybeFolderPath) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
`File "${relativeFilePath}" does not exist in any of these folders:\n- ${folderPaths.join(
|
`File "${relativeFilePath}" does not exist in any of these folders:
|
||||||
'\n- ',
|
- ${folderPaths.join('\n- ')}`,
|
||||||
)}`,
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
return maybeFolderPath;
|
return maybeFolderPath;
|
||||||
|
|
|
@@ -13,6 +13,16 @@ import {findAsyncSequential} from './jsUtils';
 
 const fileHash = new Map<string, string>();
 
+/**
+ * Outputs a file to the generated files directory. Only writes files if content
+ * differs from cache (for hot reload performance).
+ *
+ * @param generatedFilesDir Absolute path.
+ * @param file Path relative to `generatedFilesDir`.
+ * @param content String content to write.
+ * @param skipCache If `true` (defaults as `true` for production), file is
+ * force-rewritten, skipping cache.
+ */
 export async function generate(
   generatedFilesDir: string,
   file: string,
|
||||||
|
|
||||||
if (skipCache) {
|
if (skipCache) {
|
||||||
await fs.outputFile(filepath, content);
|
await fs.outputFile(filepath, content);
|
||||||
|
// Cache still needs to be reset, otherwise, writing "A", "B", and "A" where
|
||||||
|
// "B" skips cache will cause the last "A" not be able to overwrite as the
|
||||||
|
// first "A" remains in cache. But if the file never existed in cache, no
|
||||||
|
// need to register it.
|
||||||
|
if (fileHash.get(filepath)) {
|
||||||
|
fileHash.set(filepath, createHash('md5').update(content).digest('hex'));
|
||||||
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
let lastHash = fileHash.get(filepath);
|
let lastHash = fileHash.get(filepath);
|
||||||
|
|
||||||
// If file already exists but its not in runtime cache yet,
|
// If file already exists but it's not in runtime cache yet, we try to
|
||||||
// we try to calculate the content hash and then compare
|
// calculate the content hash and then compare. This is to avoid unnecessary
|
||||||
// This is to avoid unnecessary overwriting and we can reuse old file.
|
// overwriting and we can reuse old file.
|
||||||
if (!lastHash && (await fs.pathExists(filepath))) {
|
if (!lastHash && (await fs.pathExists(filepath))) {
|
||||||
const lastContent = await fs.readFile(filepath, 'utf8');
|
const lastContent = await fs.readFile(filepath, 'utf8');
|
||||||
lastHash = createHash('md5').update(lastContent).digest('hex');
|
lastHash = createHash('md5').update(lastContent).digest('hex');
|
||||||
|
@ -45,7 +62,7 @@ export async function generate(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const chunkNameCache = new Map();
|
const chunkNameCache = new Map<string, string>();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generate unique chunk name given a module path.
|
* Generate unique chunk name given a module path.
|
||||||
|
@ -56,7 +73,7 @@ export function genChunkName(
|
||||||
preferredName?: string,
|
preferredName?: string,
|
||||||
shortId: boolean = process.env.NODE_ENV === 'production',
|
shortId: boolean = process.env.NODE_ENV === 'production',
|
||||||
): string {
|
): string {
|
||||||
let chunkName: string | undefined = chunkNameCache.get(modulePath);
|
let chunkName = chunkNameCache.get(modulePath);
|
||||||
if (!chunkName) {
|
if (!chunkName) {
|
||||||
if (shortId) {
|
if (shortId) {
|
||||||
chunkName = simpleHash(modulePath, 8);
|
chunkName = simpleHash(modulePath, 8);
|
||||||
|
@ -82,6 +99,8 @@ export function genChunkName(
|
||||||
* @returns This returns a buffer, which you have to decode string yourself if
|
* @returns This returns a buffer, which you have to decode string yourself if
|
||||||
* needed. (Not always necessary since the output isn't for human consumption
|
* needed. (Not always necessary since the output isn't for human consumption
|
||||||
* anyways, and most HTML manipulation libs accept buffers)
|
* anyways, and most HTML manipulation libs accept buffers)
|
||||||
|
* @throws Throws when the HTML file is not found at any of the potential paths.
|
||||||
|
* This should never happen as it would lead to a 404.
|
||||||
*/
|
*/
|
||||||
export async function readOutputHTMLFile(
|
export async function readOutputHTMLFile(
|
||||||
permalink: string,
|
permalink: string,
|
||||||
|
|
|
@@ -8,23 +8,67 @@
 import path from 'path';
 import shell from 'shelljs';
 
+/** Custom error thrown when git is not found in `PATH`. */
 export class GitNotFoundError extends Error {}
 
+/** Custom error thrown when the current file is not tracked by git. */
 export class FileNotTrackedError extends Error {}
 
+/**
+ * Fetches the git history of a file and returns a relevant commit date.
+ * It gets the commit date instead of author date so that amended commits
+ * can have their dates updated.
+ *
+ * @throws {GitNotFoundError} If git is not found in `PATH`.
+ * @throws {FileNotTrackedError} If the current file is not tracked by git.
+ * @throws Also throws when `git log` exited with non-zero, or when it outputs
+ * unexpected text.
+ */
 export function getFileCommitDate(
+  /** Absolute path to the file. */
   file: string,
-  args: {age?: 'oldest' | 'newest'; includeAuthor?: false},
+  args: {
+    /**
+     * `"oldest"` is the commit that added the file, following renames;
+     * `"newest"` is the last commit that edited the file.
+     */
+    age?: 'oldest' | 'newest';
+    /** Use `includeAuthor: true` to get the author information as well. */
+    includeAuthor?: false;
+  },
 ): {
+  /** Relevant commit date. */
   date: Date;
+  /** Timestamp in **seconds**, as returned from git. */
   timestamp: number;
 };
+/**
+ * Fetches the git history of a file and returns a relevant commit date.
+ * It gets the commit date instead of author date so that amended commits
+ * can have their dates updated.
+ *
+ * @throws {GitNotFoundError} If git is not found in `PATH`.
+ * @throws {FileNotTrackedError} If the current file is not tracked by git.
+ * @throws Also throws when `git log` exited with non-zero, or when it outputs
+ * unexpected text.
+ */
 export function getFileCommitDate(
+  /** Absolute path to the file. */
   file: string,
-  args: {age?: 'oldest' | 'newest'; includeAuthor: true},
+  args: {
+    /**
+     * `"oldest"` is the commit that added the file, following renames;
+     * `"newest"` is the last commit that edited the file.
+     */
+    age?: 'oldest' | 'newest';
+    includeAuthor: true;
+  },
 ): {
+  /** Relevant commit date. */
   date: Date;
+  /** Timestamp in **seconds**, as returned from git. */
   timestamp: number;
+  /** The author's name, as returned from git. */
   author: string;
 };
 export function getFileCommitDate(
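The two signatures above are TypeScript function overloads: the literal type of `includeAuthor` (`false` vs `true`) selects whether the result type carries the `author` field, so call sites are typed precisely. Example usage (the file path is illustrative):

```ts
// Result typed as {date: Date; timestamp: number}:
const {date} = getFileCommitDate('/repo/docs/intro.md', {age: 'newest'});

// includeAuthor: true selects the overload whose result adds {author: string}:
const {date: added, author} = getFileCommitDate('/repo/docs/intro.md', {
  age: 'oldest',
  includeAuthor: true,
});
```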
@@ -53,8 +97,6 @@ export function getFileCommitDate(
     );
   }
 
-  // Commit time and author name; not using author time so that amended commits
-  // can have their dates updated
   let formatArg = '--format=%ct';
   if (includeAuthor) {
     formatArg += ',%an';
@@ -10,24 +10,31 @@
 import Micromatch from 'micromatch'; // Note: Micromatch is used by Globby
 import path from 'path';
 
+/** A re-export of the globby instance. */
 export {default as Globby} from 'globby';
 
-// The default patterns we ignore when globbing
-// using _ prefix for exclusion by convention
+/**
+ * The default glob patterns we ignore when sourcing content.
+ * - Ignore files and folders starting with `_` recursively
+ * - Ignore tests
+ */
 export const GlobExcludeDefault = [
-  // Ignore files starting with _
   '**/_*.{js,jsx,ts,tsx,md,mdx}',
-
-  // Ignore folders starting with _ (including folder content)
   '**/_*/**',
-
-  // Ignore tests
   '**/*.test.{js,jsx,ts,tsx}',
   '**/__tests__/**',
 ];
 
 type Matcher = (str: string) => boolean;
 
+/**
+ * A very thin wrapper around `Micromatch.makeRe`.
+ *
+ * @see {@link createAbsoluteFilePathMatcher}
+ * @param patterns A list of glob patterns.
+ * @returns A matcher handle that tells if a file path is matched by any of the
+ * patterns.
+ */
 export function createMatcher(patterns: string[]): Matcher {
   const regexp = new RegExp(
     patterns.map((pattern) => Micromatch.makeRe(pattern).source).join('|'),
|
||||||
return (str) => regexp.test(str);
|
return (str) => regexp.test(str);
|
||||||
}
|
}
|
||||||
|
|
||||||
// We use match patterns like '**/_*/**',
|
/**
|
||||||
// This function permits to help to:
|
* We use match patterns like `"** /_* /**"` (ignore the spaces), where `"_*"`
|
||||||
// Match /user/sebastien/website/docs/_partials/xyz.md
|
* should only be matched within a subfolder. This function would:
|
||||||
// Ignore /user/_sebastien/website/docs/partials/xyz.md
|
* - Match `/user/sebastien/website/docs/_partials/xyz.md`
|
||||||
|
* - Ignore `/user/_sebastien/website/docs/partials/xyz.md`
|
||||||
|
*
|
||||||
|
* @param patterns A list of glob patterns.
|
||||||
|
* @param rootFolders A list of root folders to resolve the glob from.
|
||||||
|
* @returns A matcher handle that tells if a file path is matched by any of the
|
||||||
|
* patterns, resolved from the first root folder that contains the path.
|
||||||
|
* @throws Throws when the returned matcher receives a path that doesn't belong
|
||||||
|
* to any of the `rootFolders`.
|
||||||
|
*/
|
||||||
export function createAbsoluteFilePathMatcher(
|
export function createAbsoluteFilePathMatcher(
|
||||||
patterns: string[],
|
patterns: string[],
|
||||||
rootFolders: string[],
|
rootFolders: string[],
|
||||||
|
@ -51,8 +67,8 @@ export function createAbsoluteFilePathMatcher(
|
||||||
);
|
);
|
||||||
if (!rootFolder) {
|
if (!rootFolder) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
`createAbsoluteFilePathMatcher unexpected error, absoluteFilePath=${absoluteFilePath} was not contained in any of the root folders ${JSON.stringify(
|
`createAbsoluteFilePathMatcher unexpected error, absoluteFilePath=${absoluteFilePath} was not contained in any of the root folders: ${rootFolders.join(
|
||||||
rootFolders,
|
', ',
|
||||||
)}`,
|
)}`,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
@@ -9,20 +9,21 @@ import {createHash} from 'crypto';
 import _ from 'lodash';
 import {shortName, isNameTooLong} from './pathUtils';
 
+/** Thin wrapper around `crypto.createHash("md5")`. */
 export function md5Hash(str: string): string {
   return createHash('md5').update(str).digest('hex');
 }
 
+/** Creates an MD5 hash and truncates it to the given length. */
 export function simpleHash(str: string, length: number): string {
-  return md5Hash(str).substr(0, length);
+  return md5Hash(str).substring(0, length);
 }
 
 // Based on https://github.com/gatsbyjs/gatsby/pull/21518/files
 /**
- * Given an input string, convert to kebab-case and append a hash.
- * Avoid str collision.
- * Also removes part of the string if its larger than the allowed
- * filename per OS. Avoids ERRNAMETOOLONG error.
+ * Given an input string, convert to kebab-case and append a hash, avoiding name
+ * collision. Also removes part of the string if its larger than the allowed
+ * filename per OS, avoiding `ERRNAMETOOLONG` error.
  */
 export function docuHash(str: string): string {
   if (str === '/') {
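The `substr` → `substring` swap in `simpleHash` is behavior-preserving: `String#substr` is a deprecated Annex B API, and for a start of 0 with a non-negative length the two calls return the same prefix:

```ts
const hash = 'd41d8cd98f00b204e9800998ecf8427e';

hash.substr(0, 8); // 'd41d8cd9' (deprecated: start + length)
hash.substring(0, 8); // 'd41d8cd9' (start + end index; identical here)
```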
@@ -8,16 +8,21 @@
 import path from 'path';
 import _ from 'lodash';
 import type {TranslationFileContent, TranslationFile} from '@docusaurus/types';
-import {DEFAULT_PLUGIN_ID} from './constants';
+import {DEFAULT_PLUGIN_ID, I18N_DIR_NAME} from './constants';
 
+/**
+ * Takes a list of translation file contents, and shallow-merges them into one.
+ */
 export function mergeTranslations(
   contents: TranslationFileContent[],
 ): TranslationFileContent {
   return contents.reduce((acc, content) => ({...acc, ...content}), {});
 }
 
-// Useful to update all the messages of a translation file
-// Used in tests to simulate translations
+/**
+ * Useful to update all the messages of a translation file. Used in tests to
+ * simulate translations.
+ */
 export function updateTranslationFileMessages(
   translationFile: TranslationFile,
   updateMessage: (message: string) => string,
@@ -31,6 +36,10 @@ export function updateTranslationFileMessages(
   };
 }
 
+/**
+ * Takes everything needed and constructs a plugin i18n path. Plugins should
+ * expect everything it needs for translations to be found under this path.
+ */
 export function getPluginI18nPath({
   siteDir,
   locale,
@@ -46,7 +55,7 @@ export function getPluginI18nPath({
 }): string {
   return path.join(
     siteDir,
-    'i18n',
+    I18N_DIR_NAME,
     // namespace first by locale: convenient to work in a single folder for a
     // translator
     locale,
@@ -13,9 +13,11 @@ export {
   BABEL_CONFIG_FILE_NAME,
   GENERATED_FILES_DIR_NAME,
   SRC_DIR_NAME,
-  STATIC_DIR_NAME,
+  DEFAULT_STATIC_DIR_NAME,
   OUTPUT_STATIC_ASSETS_DIR_NAME,
   THEME_PATH,
+  I18N_DIR_NAME,
+  CODE_TRANSLATIONS_FILE_NAME,
   DEFAULT_PORT,
   DEFAULT_PLUGIN_ID,
   WEBPACK_URL_LOADER_LIMIT,
@@ -34,7 +36,6 @@ export {
 export {
   removeSuffix,
   removePrefix,
-  getElementsAround,
   mapAsyncSequential,
   findAsyncSequential,
   reportMessage,
@@ -56,8 +57,6 @@ export {
 export {
   type Tag,
   type FrontMatterTag,
-  type TaggedItemGroup,
-  normalizeFrontMatterTag,
   normalizeFrontMatterTags,
   groupTaggedItems,
 } from './tags';
@@ -73,8 +72,6 @@ export {
 export {
   type ContentPaths,
   type BrokenMarkdownLink,
-  type ReplaceMarkdownLinksParams,
-  type ReplaceMarkdownLinksReturn,
   replaceMarkdownLinks,
 } from './markdownLinks';
 export {type SluggerOptions, type Slugger, createSlugger} from './slugger';
@@ -8,36 +8,27 @@
 import type {ReportingSeverity} from '@docusaurus/types';
 import logger from '@docusaurus/logger';
 
+/** Removes a given string suffix from `str`. */
 export function removeSuffix(str: string, suffix: string): string {
   if (suffix === '') {
-    return str; // always returns "" otherwise!
+    // str.slice(0, 0) is ""
+    return str;
   }
   return str.endsWith(suffix) ? str.slice(0, -suffix.length) : str;
 }
 
+/** Removes a given string prefix from `str`. */
 export function removePrefix(str: string, prefix: string): string {
   return str.startsWith(prefix) ? str.slice(prefix.length) : str;
 }
 
-export function getElementsAround<T>(
-  array: T[],
-  aroundIndex: number,
-): {
-  next: T | undefined;
-  previous: T | undefined;
-} {
-  const min = 0;
-  const max = array.length - 1;
-  if (aroundIndex < min || aroundIndex > max) {
-    throw new Error(
-      `Valid "aroundIndex" for array (of size ${array.length}) are between ${min} and ${max}, but you provided ${aroundIndex}.`,
-    );
-  }
-  const previous = aroundIndex === min ? undefined : array[aroundIndex - 1];
-  const next = aroundIndex === max ? undefined : array[aroundIndex + 1];
-  return {previous, next};
-}
-
+/**
+ * `Array#map` for async operations where order matters.
+ * @param array The array to traverse.
+ * @param action An async action to be performed on every array item. Will be
+ * awaited before working on the next.
+ * @returns The list of results returned from every `action(item)`
+ */
 export async function mapAsyncSequential<T, R>(
   array: T[],
   action: (t: T) => Promise<R>,
@@ -50,6 +41,14 @@ export async function mapAsyncSequential<T, R>(
   return results;
 }
 
+/**
+ * `Array#find` for async operations where order matters.
+ * @param array The array to traverse.
+ * @param predicate An async predicate to be called on every array item. Should
+ * return a boolean indicating whether the currently element should be returned.
+ * @returns The function immediately returns the first item on which `predicate`
+ * returns `true`, or `undefined` if none matches the predicate.
+ */
 export async function findAsyncSequential<T>(
   array: T[],
   predicate: (t: T) => Promise<boolean>,
@@ -62,6 +61,21 @@ export async function findAsyncSequential<T>(
   return undefined;
 }
 
+/**
+ * Takes a message and reports it according to the severity that the user wants.
+ *
+ * - `ignore`: completely no-op
+ * - `log`: uses the `INFO` log level
+ * - `warn`: uses the `WARN` log level
+ * - `error`: uses the `ERROR` log level
+ * - `throw`: aborts the process, throws the error.
+ *
+ * Since the logger doesn't have logging level filters yet, these severities
+ * mostly just differ by their colors.
+ *
+ * @throws In addition to throwing when `reportingSeverity === "throw"`, this
+ * function also throws if `reportingSeverity` is not one of the above.
+ */
 export function reportMessage(
   message: string,
   reportingSeverity: ReportingSeverity,
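The new JSDoc emphasizes that `findAsyncSequential` awaits one predicate at a time and short-circuits on the first match, unlike firing all probes in parallel. A usage sketch (the `fs-extra` dependency and paths are illustrative):

```ts
import fs from 'fs-extra';

// Checks folders strictly in order and stops hitting the filesystem
// as soon as one contains the file: the localized folder wins.
const folder = await findAsyncSequential(
  ['/site/i18n/fr/docusaurus-plugin-content-docs/current', '/site/docs'],
  (dir) => fs.pathExists(`${dir}/intro.md`),
);
```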
@@ -6,41 +6,79 @@
  */

 import path from 'path';
+import {getContentPathList} from './dataFileUtils';
 import {aliasedSitePath} from './pathUtils';

+/**
+ * Content plugins have a base path and a localized path to source content from.
+ * We will look into the localized path in priority.
+ */
 export type ContentPaths = {
+  /**
+   * The absolute path to the base content directory, like `"<siteDir>/docs"`.
+   */
   contentPath: string;
+  /**
+   * The absolute path to the localized content directory, like
+   * `"<siteDir>/i18n/zh-Hans/plugin-content-docs"`.
+   */
   contentPathLocalized: string;
 };

+/** Data structure representing each broken Markdown link to be reported. */
 export type BrokenMarkdownLink<T extends ContentPaths> = {
+  /** Absolute path to the file containing this link. */
   filePath: string;
+  /**
+   * This is generic because it may contain extra metadata like version name,
+   * which the reporter can provide for context.
+   */
   contentPaths: T;
+  /**
+   * The content of the link, like `"./brokenFile.md"`
+   */
   link: string;
 };

-export type ReplaceMarkdownLinksParams<T extends ContentPaths> = {
-  siteDir: string;
-  fileString: string;
-  filePath: string;
-  contentPaths: T;
-  sourceToPermalink: Record<string, string>;
-};
-
-export type ReplaceMarkdownLinksReturn<T extends ContentPaths> = {
-  newContent: string;
-  brokenMarkdownLinks: BrokenMarkdownLink<T>[];
-};
+/**
+ * Takes a Markdown file and replaces relative file references with their URL
+ * counterparts, e.g. `[link](./intro.md)` => `[link](/docs/intro)`, preserving
+ * everything else.
+ *
+ * This method uses best effort to find a matching file. The file reference can
+ * be relative to the directory of the current file (most likely) or any of the
+ * content paths (so `/tutorials/intro.md` can be resolved as
+ * `<siteDir>/docs/tutorials/intro.md`). Links that contain the `http(s):` or
+ * `@site/` prefix will always be ignored.
+ */

 export function replaceMarkdownLinks<T extends ContentPaths>({
   siteDir,
   fileString,
   filePath,
   contentPaths,
   sourceToPermalink,
-}: ReplaceMarkdownLinksParams<T>): ReplaceMarkdownLinksReturn<T> {
-  const {contentPath, contentPathLocalized} = contentPaths;
+}: {
+  /** Absolute path to the site directory, used to resolve aliased paths. */
+  siteDir: string;
+  /** The Markdown file content to be processed. */
+  fileString: string;
+  /** Absolute path to the current file containing `fileString`. */
+  filePath: string;
+  /** The content paths which the file reference may live in. */
+  contentPaths: T;
+  /**
+   * A map from source paths to their URLs. Source paths are `@site` aliased.
+   */
+  sourceToPermalink: Record<string, string>;
+}): {
+  /**
+   * The content with all Markdown file references replaced with their URLs.
+   * Unresolved links are left as-is.
+   */
+  newContent: string;
+  /** The list of broken links. */
+  brokenMarkdownLinks: BrokenMarkdownLink<T>[];
+} {
   const brokenMarkdownLinks: BrokenMarkdownLink<T>[] = [];

   // Replace internal markdown linking (except in fenced blocks).

@@ -64,9 +102,8 @@ export function replaceMarkdownLinks<T extends ContentPaths>({

     let modifiedLine = line;
     // Replace inline-style links or reference-style links e.g:
-    // This is [Document 1](doc1.md) -> we replace this doc1.md with correct
-    // ink
-    // [doc1]: doc1.md -> we replace this doc1.md with correct link
+    // This is [Document 1](doc1.md)
+    // [doc1]: doc1.md
     const mdRegex =
       /(?:\]\(|\]:\s*)(?!https?:\/\/|@site\/)(?<filename>[^'")\]\s>]+\.mdx?)/g;
     let mdMatch = mdRegex.exec(modifiedLine);

@@ -75,10 +112,9 @@ export function replaceMarkdownLinks<T extends ContentPaths>({
       const mdLink = mdMatch.groups!.filename!;

       const sourcesToTry = [
-        path.resolve(path.dirname(filePath), decodeURIComponent(mdLink)),
-        `${contentPathLocalized}/${decodeURIComponent(mdLink)}`,
-        `${contentPath}/${decodeURIComponent(mdLink)}`,
-      ];
+        path.dirname(filePath),
+        ...getContentPathList(contentPaths),
+      ].map((p) => path.join(p, decodeURIComponent(mdLink)));

       const aliasedSourceMatch = sourcesToTry
         .map((source) => aliasedSitePath(source, siteDir))
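For illustration (not part of the commit), a minimal usage sketch of the reworked `replaceMarkdownLinks` object signature; the paths and permalink map are hypothetical, and the function is assumed to be re-exported from the `@docusaurus/utils` package index:

import {replaceMarkdownLinks} from '@docusaurus/utils';

const {newContent, brokenMarkdownLinks} = replaceMarkdownLinks({
  siteDir: '/project', // hypothetical site root
  filePath: '/project/docs/intro.md', // the file being processed
  fileString: 'See [the tutorial](./tutorial.md)',
  contentPaths: {
    contentPath: '/project/docs',
    contentPathLocalized: '/project/i18n/en/docusaurus-plugin-content-docs/current',
  },
  // Source paths are `@site`-aliased, as the new JSDoc notes
  sourceToPermalink: {'@site/docs/tutorial.md': '/docs/tutorial'},
});
// newContent === 'See [the tutorial](/docs/tutorial)'
// brokenMarkdownLinks === []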
@@ -7,12 +7,25 @@

 import logger from '@docusaurus/logger';
 import matter from 'gray-matter';
-import {createSlugger, type Slugger} from './slugger';
+import {createSlugger, type Slugger, type SluggerOptions} from './slugger';

-// Input: ## Some heading {#some-heading}
-// Output: {text: "## Some heading", id: "some-heading"}
+// Some utilities for parsing Markdown content. These things are only used on
+// server-side when we infer metadata like `title` and `description` from the
+// content. Most parsing is still done in MDX through the mdx-loader.

+/**
+ * Parses custom ID from a heading. The ID must be composed of letters,
+ * underscores, and dashes only.
+ *
+ * @param heading e.g. `## Some heading {#some-heading}` where the last
+ * character must be `}` for the ID to be recognized
+ */
 export function parseMarkdownHeadingId(heading: string): {
+  /**
+   * The heading content sans the ID part, right-trimmed. e.g. `## Some heading`
+   */
   text: string;
+  /** The heading ID. e.g. `some-heading` */
   id?: string;
 } {
   const customHeadingIdRegex = /\s*\{#(?<id>[\w-]+)\}$/;
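To make the new JSDoc concrete, here is the input/output pair that the removed comment used to document, written as calls (illustrative, assuming the export from `@docusaurus/utils`):

import {parseMarkdownHeadingId} from '@docusaurus/utils';

parseMarkdownHeadingId('## Some heading {#some-heading}');
// => {text: '## Some heading', id: 'some-heading'}
parseMarkdownHeadingId('## Some heading');
// => {text: '## Some heading', id: undefined}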
@@ -26,26 +39,40 @@ export function parseMarkdownHeadingId(heading: string): {
   return {text: heading, id: undefined};
 }

-// Hacky way of stripping out import statements from the excerpt
 // TODO: Find a better way to do so, possibly by compiling the Markdown content,
 // stripping out HTML tags and obtaining the first line.
+/**
+ * Creates an excerpt of a Markdown file. This function will:
+ *
+ * - Ignore h1 headings (setext or atx)
+ * - Ignore import/export
+ * - Ignore code blocks
+ *
+ * And for the first contentful line, it will strip away most Markdown
+ * syntax, including HTML tags, emphasis, links (keeping the text), etc.
+ */
 export function createExcerpt(fileString: string): string | undefined {
   const fileLines = fileString
-    .trimLeft()
+    .trimStart()
     // Remove Markdown alternate title
     .replace(/^[^\n]*\n[=]+/g, '')
     .split('\n');
   let inCode = false;
+  let inImport = false;
   let lastCodeFence = '';

   for (const fileLine of fileLines) {
+    if (fileLine === '' && inImport) {
+      inImport = false;
+    }
     // Skip empty line.
     if (!fileLine.trim()) {
       continue;
     }

     // Skip import/export declaration.
-    if (/^(?:import|export)\s.*/.test(fileLine)) {
+    if ((/^(?:import|export)\s.*/.test(fileLine) || inImport) && !inCode) {
+      inImport = true;
       continue;
     }
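A small illustrative call, following the behaviors the new JSDoc lists (h1 and import lines skipped, emphasis and link syntax stripped); the exact output shown is an assumption based on that description, not taken from the commit:

import {createExcerpt} from '@docusaurus/utils';

const excerpt = createExcerpt(`# Title

import Tabs from '@theme/Tabs';

**Hello** [world](./world.md)!
`);
// => 'Hello world!' (first contentful line, with Markdown syntax stripped)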
@@ -102,8 +129,22 @@ export function createExcerpt(fileString: string): string | undefined {
   return undefined;
 }

+/**
+ * Takes a raw Markdown file content, and parses the front matter using
+ * gray-matter. Worth noting that gray-matter accepts TOML and other markup
+ * languages as well.
+ *
+ * @throws Throws when gray-matter throws. e.g.:
+ * ```md
+ * ---
+ * foo: : bar
+ * ---
+ * ```
+ */
 export function parseFrontMatter(markdownFileContent: string): {
+  /** Front matter as parsed by gray-matter. */
   frontMatter: Record<string, unknown>;
+  /** The remaining content, trimmed. */
   content: string;
 } {
   const {data, content} = matter(markdownFileContent);
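A minimal sketch of the documented behavior, assuming the export from `@docusaurus/utils` and standard gray-matter semantics:

import {parseFrontMatter} from '@docusaurus/utils';

const {frontMatter, content} = parseFrontMatter(`---
title: Intro
---

Body text`);
// frontMatter => {title: 'Intro'}; content => 'Body text' (trimmed)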
@@ -113,11 +154,6 @@ export function parseFrontMatter(markdownFileContent: string): {
   };
 }

-/**
- * Try to convert markdown heading to text. Does not need to be perfect, it is
- * only used as a fallback when frontMatter.title is not provided. For now, we
- * just unwrap possible inline code blocks (# `config.js`)
- */
 function toTextContentTitle(contentTitle: string): string {
   if (contentTitle.startsWith('`') && contentTitle.endsWith('`')) {
     return contentTitle.substring(1, contentTitle.length - 1);
@@ -125,10 +161,36 @@ function toTextContentTitle(contentTitle: string): string {
   return contentTitle;
 }

+type ParseMarkdownContentTitleOptions = {
+  /**
+   * If `true`, the matching title will be removed from the returned content.
+   * We can promise that at least one empty line will be left between the
+   * content before and after, but you shouldn't make too much assumption
+   * about what's left.
+   */
+  removeContentTitle?: boolean;
+};

+/**
+ * Takes the raw Markdown content, without front matter, and tries to find an h1
+ * title (setext or atx) to be used as metadata.
+ *
+ * It only searches until the first contentful paragraph, ignoring import/export
+ * declarations.
+ *
+ * It will try to convert markdown to reasonable text, but won't be best effort,
+ * since it's only used as a fallback when `frontMatter.title` is not provided.
+ * For now, we just unwrap inline code (``# `config.js` `` => `config.js`).
+ */
 export function parseMarkdownContentTitle(
   contentUntrimmed: string,
-  options?: {removeContentTitle?: boolean},
-): {content: string; contentTitle: string | undefined} {
+  options?: ParseMarkdownContentTitleOptions,
+): {
+  /** The content, optionally without the content title. */
+  content: string;
+  /** The title, trimmed and without the `#`. */
+  contentTitle: string | undefined;
+} {
   const removeContentTitleOption = options?.removeContentTitle ?? false;

   const content = contentUntrimmed.trim();
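An illustrative pair of calls showing the `removeContentTitle` option described in the new type (a sketch based on the JSDoc, not from the commit):

import {parseMarkdownContentTitle} from '@docusaurus/utils';

parseMarkdownContentTitle('# Hello\n\nSome text');
// => {content: '# Hello\n\nSome text', contentTitle: 'Hello'}
parseMarkdownContentTitle('# Hello\n\nSome text', {removeContentTitle: true});
// => {content: 'Some text', contentTitle: 'Hello'}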
@@ -171,17 +233,28 @@ export function parseMarkdownContentTitle(
   };
 }

-type ParsedMarkdown = {
-  frontMatter: Record<string, unknown>;
-  content: string;
-  contentTitle: string | undefined;
-  excerpt: string | undefined;
-};
+/**
+ * Makes a full-round parse.
+ *
+ * @throws Throws when `parseFrontMatter` throws, usually because of invalid
+ * syntax.
+ */

 export function parseMarkdownString(
   markdownFileContent: string,
-  options?: {removeContentTitle?: boolean},
-): ParsedMarkdown {
+  options?: ParseMarkdownContentTitleOptions,
+): {
+  /** @see {@link parseFrontMatter} */
+  frontMatter: Record<string, unknown>;
+  /** @see {@link parseMarkdownContentTitle} */
+  contentTitle: string | undefined;
+  /** @see {@link createExcerpt} */
+  excerpt: string | undefined;
+  /**
+   * Content without front matter and (optionally) without title, depending on
+   * the `removeContentTitle` option.
+   */
+  content: string;
+} {
   try {
     const {frontMatter, content: contentWithoutFrontMatter} =
       parseFrontMatter(markdownFileContent);
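Putting the three underlying parsers together, a sketch of the full-round parse (illustrative values; behavior as documented above):

import {parseMarkdownString} from '@docusaurus/utils';

const {frontMatter, contentTitle, excerpt, content} = parseMarkdownString(`---
id: intro
---

# Hello

First paragraph.`);
// frontMatter => {id: 'intro'}; contentTitle => 'Hello';
// excerpt => 'First paragraph.'; content => '# Hello\n\nFirst paragraph.'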
@@ -229,11 +302,16 @@ function addHeadingId(
   return `${headingHashes}${headingText} {#${slug}}`;
 }

-export type WriteHeadingIDOptions = {
-  maintainCase?: boolean;
+export type WriteHeadingIDOptions = SluggerOptions & {
+  /** Overwrite existing heading IDs. */
   overwrite?: boolean;
 };

+/**
+ * Takes Markdown content, returns new content with heading IDs written.
+ * Respects existing IDs (unless `overwrite=true`) and never generates colliding
+ * IDs (through the slugger).
+ */
 export function writeMarkdownHeadingId(
   content: string,
   options: WriteHeadingIDOptions = {maintainCase: false, overwrite: false},
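A sketch of the collision-free behavior the new JSDoc promises (output is inferred from the slugger semantics documented in this same commit):

import {writeMarkdownHeadingId} from '@docusaurus/utils';

writeMarkdownHeadingId('## Josh Cena\n\n## Josh Cena');
// => '## Josh Cena {#josh-cena}\n\n## Josh Cena {#josh-cena-1}'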
|
@ -24,7 +24,7 @@ export const isNameTooLong = (str: string): boolean =>
|
||||||
? str.length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_CHARS // MacOS (APFS) and Windows (NTFS) filename length limit (255 chars)
|
? str.length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_CHARS // MacOS (APFS) and Windows (NTFS) filename length limit (255 chars)
|
||||||
: Buffer.from(str).length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_BYTES; // Other (255 bytes)
|
: Buffer.from(str).length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_BYTES; // Other (255 bytes)
|
||||||
|
|
||||||
export const shortName = (str: string): string => {
|
export function shortName(str: string): string {
|
||||||
if (isMacOs() || isWindows()) {
|
if (isMacOs() || isWindows()) {
|
||||||
const overflowingChars = str.length - MAX_PATH_SEGMENT_CHARS;
|
const overflowingChars = str.length - MAX_PATH_SEGMENT_CHARS;
|
||||||
return str.slice(
|
return str.slice(
|
||||||
|
@ -41,7 +41,7 @@ export const shortName = (str: string): string => {
|
||||||
Buffer.byteLength(strBuffer) - overflowingBytes - SPACE_FOR_APPENDING - 1,
|
Buffer.byteLength(strBuffer) - overflowingBytes - SPACE_FOR_APPENDING - 1,
|
||||||
)
|
)
|
||||||
.toString();
|
.toString();
|
||||||
};
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Convert Windows backslash paths to posix style paths.
|
* Convert Windows backslash paths to posix style paths.
|
||||||
|
|
|
@@ -10,12 +10,24 @@ import GithubSlugger from 'github-slugger';
 // We create our own abstraction on top of the lib:
 // - unify usage everywhere in the codebase
 // - ability to add extra options
-export type SluggerOptions = {maintainCase?: boolean};
+export type SluggerOptions = {
+  /** Keep the headings' casing, otherwise make all lowercase. */
+  maintainCase?: boolean;
+};

 export type Slugger = {
+  /**
+   * Takes a Markdown heading like "Josh Cena" and sluggifies it according to
+   * GitHub semantics (in this case `josh-cena`). Stateful, because if you try
+   * to sluggify "Josh Cena" again it would return `josh-cena-1`.
+   */
   slug: (value: string, options?: SluggerOptions) => string;
 };

+/**
+ * A thin wrapper around github-slugger. This is a factory function that returns
+ * a stateful Slugger object.
+ */
 export function createSlugger(): Slugger {
   const githubSlugger = new GithubSlugger();
   return {
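The statefulness called out in the new JSDoc, as a usage sketch (assuming the export from `@docusaurus/utils`):

import {createSlugger} from '@docusaurus/utils';

const slugger = createSlugger();
slugger.slug('Josh Cena'); // => 'josh-cena'
slugger.slug('Josh Cena'); // => 'josh-cena-1' (stateful deduplication)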
@@ -10,12 +10,13 @@ import {normalizeUrl} from './urlUtils';

 export type Tag = {
   label: string;
+  /** Permalink to this tag's page, without the `/tags/` base path. */
   permalink: string;
 };

 export type FrontMatterTag = string | Tag;

-export function normalizeFrontMatterTag(
+function normalizeFrontMatterTag(
   tagsPath: string,
   frontMatterTag: FrontMatterTag,
 ): Tag {

@@ -45,8 +46,19 @@ export function normalizeFrontMatterTag(
   };
 }

+/**
+ * Takes tag objects as they are defined in front matter, and normalizes each
+ * into a standard tag object. The permalink is created by appending the
+ * sluggified label to `tagsPath`. Front matter tags already containing
+ * permalinks would still have `tagsPath` prepended.
+ *
+ * The result will always be unique by permalinks. The behavior with colliding
+ * permalinks is undetermined.
+ */
 export function normalizeFrontMatterTags(
+  /** Base path to append the tag permalinks to. */
   tagsPath: string,
+  /** Can be `undefined`, so that we can directly pipe in `frontMatter.tags`. */
   frontMatterTags: FrontMatterTag[] | undefined = [],
 ): Tag[] {
   const tags = frontMatterTags.map((tag) =>
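An illustrative call showing both tag shapes the JSDoc describes, a string label and a pre-built tag object (values are hypothetical):

import {normalizeFrontMatterTags} from '@docusaurus/utils';

normalizeFrontMatterTags('/tags', ['Some Label', {label: 'Custom', permalink: '/custom'}]);
// => [
//   {label: 'Some Label', permalink: '/tags/some-label'},
//   {label: 'Custom', permalink: '/tags/custom'},
// ]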
@@ -56,29 +68,34 @@ export function normalizeFrontMatterTags(
   return _.uniqBy(tags, (tag) => tag.permalink);
 }

-export type TaggedItemGroup<Item> = {
+type TaggedItemGroup<Item> = {
   tag: Tag;
   items: Item[];
 };

 /**
- * Permits to group docs/blogPosts by tag (provided by front matter)
- * Note: groups are indexed by permalink, because routes must be unique in the
- * end. Labels may vary on 2 md files but they are normalized. Docs with
- * label='some label' and label='some-label' should end-up in the same
- * group/page in the end. We can't create 2 routes /some-label because one would
- * override the other
+ * Permits to group docs/blog posts by tag (provided by front matter).
+ *
+ * @returns a map from tag permalink to the items and other relevant tag data.
+ * The record is indexed by permalink, because routes must be unique in the end.
+ * Labels may vary on 2 MD files but they are normalized. Docs with
+ * label='some label' and label='some-label' should end up in the same page.
  */
 export function groupTaggedItems<Item>(
   items: readonly Item[],
+  /**
+   * A callback telling me how to get the tags list of the current item. Usually
+   * simply getting it from some metadata of the current item.
+   */
   getItemTags: (item: Item) => readonly Tag[],
-): Record<string, TaggedItemGroup<Item>> {
-  const result: Record<string, TaggedItemGroup<Item>> = {};
+): {[permalink: string]: TaggedItemGroup<Item>} {
+  const result: {[permalink: string]: TaggedItemGroup<Item>} = {};

-  function handleItemTag(item: Item, tag: Tag) {
+  items.forEach((item) => {
+    getItemTags(item).forEach((tag) => {
       // Init missing tag groups
-      // TODO: it's not really clear what should be the behavior if 2 items have
-      // the same tag but the permalink is different for each
+      // TODO: it's not really clear what should be the behavior if 2 tags have
+      // the same permalink but the label is different for each
       // For now, the first tag found wins
       result[tag.permalink] ??= {
         tag,

@@ -87,11 +104,6 @@ export function groupTaggedItems<Item>(

       // Add item to group
       result[tag.permalink]!.items.push(item);
-  }
-
-  items.forEach((item) => {
-    getItemTags(item).forEach((tag) => {
-      handleItemTag(item, tag);
     });
   });
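A sketch of the grouping and the "first tag found wins" rule from the updated comment (hypothetical items):

import {groupTaggedItems} from '@docusaurus/utils';

const docs = [
  {id: 'a', tags: [{label: 'Docusaurus', permalink: '/tags/docusaurus'}]},
  {id: 'b', tags: [{label: 'docusaurus', permalink: '/tags/docusaurus'}]},
];
const groups = groupTaggedItems(docs, (doc) => doc.tags);
// groups['/tags/docusaurus'].items.length === 2
// groups['/tags/docusaurus'].tag.label === 'Docusaurus' (first tag found wins)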
@@ -8,6 +8,18 @@
 import {removeSuffix} from './jsUtils';
 import resolvePathnameUnsafe from 'resolve-pathname';

+/**
+ * Much like `path.join`, but much better. Takes an array of URL segments, and
+ * joins them into a reasonable URL.
+ *
+ * - `["file:", "/home", "/user/", "website"]` => `file:///home/user/website`
+ * - `["file://", "home", "/user/", "website"]` => `file://home/user/website` (relative!)
+ * - Remove trailing slash before parameters or hash.
+ * - Replace `?` in query parameters with `&`.
+ * - Dedupe forward slashes in the entire path, avoiding protocol slashes.
+ *
+ * @throws {TypeError} If any of the URL segment is not a string, this throws.
+ */
 export function normalizeUrl(rawUrls: string[]): string {
   const urls = [...rawUrls];
   const resultArray = [];
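The second call below is taken verbatim from the new JSDoc; the first is an assumed everyday case that follows from the slash-deduping rule:

import {normalizeUrl} from '@docusaurus/utils';

normalizeUrl(['https://docusaurus.io/', '/docs/', 'intro']);
// => 'https://docusaurus.io/docs/intro'
normalizeUrl(['file:', '/home', '/user/', 'website']);
// => 'file:///home/user/website'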
@@ -75,8 +87,8 @@ export function normalizeUrl(rawUrls: string[]): string {
   }

   let str = resultArray.join('/');
-  // Each input component is now separated by a single slash
-  // except the possible first plain protocol part.
+  // Each input component is now separated by a single slash except the possible
+  // first plain protocol part.

   // Remove trailing slash before parameters or hash.
   str = str.replace(/\/(?<search>\?|&|#[^!])/g, '$1');

@@ -94,6 +106,11 @@ export function normalizeUrl(rawUrls: string[]): string {
   return str;
 }

+/**
+ * Takes a file's path, relative to its content folder, and computes its edit
+ * URL. If `editUrl` is `undefined`, this returns `undefined`, as is the case
+ * when the user doesn't want an edit URL in her config.
+ */
 export function getEditUrl(
   fileRelativePath: string,
   editUrl?: string,

@@ -105,8 +122,8 @@ export function getEditUrl(
 }

 /**
- * Convert filepath to url path.
- * Example: 'index.md' -> '/', 'foo/bar.js' -> '/foo/bar',
+ * Converts file path to a reasonable URL path, e.g. `'index.md'` -> `'/'`,
+ * `'foo/bar.js'` -> `'/foo/bar'`
  */
 export function fileToPath(file: string): string {
   const indexRE = /(?<dirname>^|.*\/)index\.(?:mdx?|jsx?|tsx?)$/i;

@@ -118,6 +135,13 @@ export function fileToPath(file: string): string {
   return `/${file.replace(extRE, '').replace(/\\/g, '/')}`;
 }

+/**
+ * Similar to `encodeURI`, but uses `encodeURIComponent` and assumes there's no
+ * query.
+ *
+ * `encodeURI("/question?/answer#section")` => `"/question?/answer#section"`;
+ * `encodePath("/question?/answer#section")` => `"/question%3F/answer%23section"`
+ */
 export function encodePath(userPath: string): string {
   return userPath
     .split('/')
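Combining the two path helpers documented above, a short usage sketch (outputs follow directly from the JSDoc examples):

import {fileToPath, encodePath} from '@docusaurus/utils';

fileToPath('index.md'); // => '/'
fileToPath('foo/bar.js'); // => '/foo/bar'
encodePath('/question?/answer#section'); // => '/question%3F/answer%23section'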
@@ -125,6 +149,10 @@ export function encodePath(userPath: string): string {
     .join('/');
 }

+/**
+ * Whether `str` is a valid pathname. It must be absolute, and not contain
+ * special characters.
+ */
 export function isValidPathname(str: string): boolean {
   if (!str.startsWith('/')) {
     return false;

@@ -138,22 +166,31 @@ export function isValidPathname(str: string): boolean {
   }
 }

-// resolve pathname and fail fast if resolution fails
+/**
+ * Resolve pathnames and fail-fast if resolution fails. Uses standard URL
+ * semantics (provided by `resolve-pathname` which is used internally by React
+ * router)
+ */
 export function resolvePathname(to: string, from?: string): string {
   return resolvePathnameUnsafe(to, from);
 }

+/** Appends a leading slash to `str`, if one doesn't exist. */
 export function addLeadingSlash(str: string): string {
   return str.startsWith('/') ? str : `/${str}`;
 }

 // TODO deduplicate: also present in @docusaurus/utils-common
+/** Appends a trailing slash to `str`, if one doesn't exist. */
 export function addTrailingSlash(str: string): string {
   return str.endsWith('/') ? str : `${str}/`;
 }

+/** Removes the trailing slash from `str`. */
 export function removeTrailingSlash(str: string): string {
   return removeSuffix(str, '/');
 }

+/** Constructs an SSH URL that can be used to push to GitHub. */
 export function buildSshUrl(
   githubHost: string,
   organizationName: string,

@@ -166,6 +203,7 @@ export function buildSshUrl(
   return `git@${githubHost}:${organizationName}/${projectName}.git`;
 }

+/** Constructs an HTTP URL that can be used to push to GitHub. */
 export function buildHttpsUrl(
   gitCredentials: string,
   githubHost: string,

@@ -179,6 +217,11 @@ export function buildHttpsUrl(
   return `https://${gitCredentials}@${githubHost}/${organizationName}/${projectName}.git`;
 }

+/**
+ * Whether the current URL is an SSH protocol. In addition to looking for
+ * `ssh:`, it will also allow protocol-less URLs like
+ * `git@github.com:facebook/docusaurus.git`.
+ */
 export function hasSSHProtocol(sourceRepoUrl: string): boolean {
   try {
     if (new URL(sourceRepoUrl).protocol === 'ssh:') {

@@ -187,6 +230,6 @@ export function hasSSHProtocol(sourceRepoUrl: string): boolean {
     return false;
   } catch {
     // Fails when there isn't a protocol
-    return /^(?:[\w-]+@)?[\w.-]+:[\w./-]+/.test(sourceRepoUrl); // git@github.com:facebook/docusaurus.git
+    return /^(?:[\w-]+@)?[\w.-]+:[\w./-]+/.test(sourceRepoUrl);
   }
 }
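A sketch of the Git URL helpers above, with expected results inferred from the return expressions and the protocol-less fallback shown in the diff:

import {buildSshUrl, hasSSHProtocol} from '@docusaurus/utils';

buildSshUrl('github.com', 'facebook', 'docusaurus');
// => 'git@github.com:facebook/docusaurus.git'

hasSSHProtocol('ssh://git@github.com/facebook/docusaurus.git'); // => true
hasSSHProtocol('git@github.com:facebook/docusaurus.git'); // => true (protocol-less)
hasSSHProtocol('https://github.com/facebook/docusaurus.git'); // => false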
@@ -31,7 +31,11 @@ type FileLoaderUtils = {
   };
 };

-// Inspired by https://github.com/gatsbyjs/gatsby/blob/8e6e021014da310b9cc7d02e58c9b3efe938c665/packages/gatsby/src/utils/webpack-utils.ts#L447
+/**
+ * Returns unified loader configurations to be used for various file types.
+ *
+ * Inspired by https://github.com/gatsbyjs/gatsby/blob/8e6e021014da310b9cc7d02e58c9b3efe938c665/packages/gatsby/src/utils/webpack-utils.ts#L447
+ */
 export function getFileLoaderUtils(): FileLoaderUtils {
   // files/images < urlLoaderLimit will be inlined as base64 strings directly in
   // the html

@@ -39,7 +43,11 @@ export function getFileLoaderUtils(): FileLoaderUtils {

   // defines the path/pattern of the assets handled by webpack
   const fileLoaderFileName = (folder: AssetFolder) =>
-    `${OUTPUT_STATIC_ASSETS_DIR_NAME}/${folder}/[name]-[contenthash].[ext]`;
+    path.posix.join(
+      OUTPUT_STATIC_ASSETS_DIR_NAME,
+      folder,
+      '[name]-[contenthash].[ext]',
+    );

   const loaders: FileLoaderUtils['loaders'] = {
     file: (options: {folder: AssetFolder}) => ({

@@ -6,7 +6,10 @@
  */

 import type {DocusaurusConfig, I18nConfig} from '@docusaurus/types';
-import {DEFAULT_CONFIG_FILE_NAME, STATIC_DIR_NAME} from '@docusaurus/utils';
+import {
+  DEFAULT_CONFIG_FILE_NAME,
+  DEFAULT_STATIC_DIR_NAME,
+} from '@docusaurus/utils';
 import {Joi, URISchema, printWarning} from '@docusaurus/utils-validation';

 const DEFAULT_I18N_LOCALE = 'en';

@@ -53,7 +56,7 @@ export const DEFAULT_CONFIG: Pick<
   noIndex: false,
   tagline: '',
   baseUrlIssueBanner: true,
-  staticDirectories: [STATIC_DIR_NAME],
+  staticDirectories: [DEFAULT_STATIC_DIR_NAME],
 };

 function createPluginSchema(theme: boolean) {
@@ -14,7 +14,12 @@ import type {
   TranslationMessage,
   InitializedPlugin,
 } from '@docusaurus/types';
-import {getPluginI18nPath, toMessageRelativeFilePath} from '@docusaurus/utils';
+import {
+  getPluginI18nPath,
+  toMessageRelativeFilePath,
+  I18N_DIR_NAME,
+  CODE_TRANSLATIONS_FILE_NAME,
+} from '@docusaurus/utils';
 import {Joi} from '@docusaurus/utils-validation';
 import logger from '@docusaurus/logger';

@@ -140,7 +145,7 @@ Maybe you should remove them? ${unknownKeys}`;

 // should we make this configurable?
 function getTranslationsDirPath(context: TranslationContext): string {
-  return path.resolve(path.join(context.siteDir, `i18n`));
+  return path.resolve(path.join(context.siteDir, I18N_DIR_NAME));
 }
 export function getTranslationsLocaleDirPath(
   context: TranslationContext,

@@ -149,7 +154,10 @@ export function getTranslationsLocaleDirPath(
 }

 function getCodeTranslationsFilePath(context: TranslationContext): string {
-  return path.join(getTranslationsLocaleDirPath(context), 'code.json');
+  return path.join(
+    getTranslationsLocaleDirPath(context),
+    CODE_TRANSLATIONS_FILE_NAME,
+  );
 }

 export async function readCodeTranslationFileContent(
|
@ -256,9 +256,13 @@ sebastienlorber
|
||||||
sensical
|
sensical
|
||||||
serializers
|
serializers
|
||||||
setaf
|
setaf
|
||||||
|
setext
|
||||||
sida
|
sida
|
||||||
simen
|
simen
|
||||||
slorber
|
slorber
|
||||||
|
sluggified
|
||||||
|
sluggifies
|
||||||
|
sluggify
|
||||||
spâce
|
spâce
|
||||||
stackblitz
|
stackblitz
|
||||||
stackblitzrc
|
stackblitzrc
|
||||||
|
|