mirror of
https://github.com/facebook/docusaurus.git
synced 2025-04-30 18:58:36 +02:00
refactor: capitalize comments (#7188)
* refactor: capitalize comments * revert...
This commit is contained in:
parent
200009008b
commit
fa1ce230ea
99 changed files with 241 additions and 350 deletions
|
@ -16,9 +16,12 @@ const PlaygroundConfigs = {
|
|||
codesandbox:
|
||||
'https://codesandbox.io/s/github/facebook/docusaurus/tree/main/examples/classic',
|
||||
|
||||
// stackblitz: 'https://stackblitz.com/fork/docusaurus', // not updated
|
||||
// stackblitz: 'https://stackblitz.com/github/facebook/docusaurus/tree/main/examples/classic', // slow to load
|
||||
stackblitz: 'https://stackblitz.com/github/facebook/docusaurus/tree/starter', // dedicated branch: faster load
|
||||
// Not updated
|
||||
// stackblitz: 'https://stackblitz.com/fork/docusaurus',
|
||||
// Slow to load
|
||||
// stackblitz: 'https://stackblitz.com/github/facebook/docusaurus/tree/main/examples/classic',
|
||||
// Dedicated branch: faster load
|
||||
stackblitz: 'https://stackblitz.com/github/facebook/docusaurus/tree/starter',
|
||||
};
|
||||
|
||||
const PlaygroundDocumentationUrl = 'https://docusaurus.io/docs/playground';
|
||||
|
|
|
@ -24,27 +24,26 @@ async function generateTemplateExample(template) {
|
|||
`generating ${template} template for codesandbox in the examples folder...`,
|
||||
);
|
||||
|
||||
// run the docusaurus script to create the template in the examples folder
|
||||
// Run the docusaurus script to create the template in the examples folder
|
||||
const command = template.endsWith('-typescript')
|
||||
? template.replace('-typescript', ' -- --typescript')
|
||||
: template;
|
||||
shell.exec(
|
||||
// /!\ we use the published init script on purpose,
|
||||
// because using the local init script is too early and could generate
|
||||
// upcoming/unavailable config options. Remember CodeSandbox templates
|
||||
// will use the published version, not the repo version
|
||||
// We use the published init script on purpose, because the local init is
|
||||
// too new and could generate upcoming/unavailable config options.
|
||||
// Remember CodeSandbox templates will use the published version,
|
||||
// not the repo version.
|
||||
`npm init docusaurus@latest examples/${template} ${command}`,
|
||||
);
|
||||
|
||||
// read the content of the package.json
|
||||
const templatePackageJson = await fs.readJSON(
|
||||
`examples/${template}/package.json`,
|
||||
);
|
||||
|
||||
// attach the dev script which would be used in code sandbox by default
|
||||
// Attach the dev script which would be used in code sandbox by default
|
||||
templatePackageJson.scripts.dev = 'docusaurus start';
|
||||
|
||||
// these example projects are not meant to be published to npm
|
||||
// These example projects are not meant to be published to npm
|
||||
templatePackageJson.private = true;
|
||||
|
||||
// Make sure package.json name is not "examples-classic". The package.json
|
||||
|
@ -58,13 +57,12 @@ async function generateTemplateExample(template) {
|
|||
? 'Docusaurus example project'
|
||||
: `Docusaurus example project (${template} template)`;
|
||||
|
||||
// rewrite the package.json file with the new edit
|
||||
await fs.writeFile(
|
||||
`./examples/${template}/package.json`,
|
||||
`${JSON.stringify(templatePackageJson, null, 2)}\n`,
|
||||
);
|
||||
|
||||
// create sandbox.config.json file at the root of template
|
||||
// Create sandbox/stackblitz config file at the root of template
|
||||
const codeSandboxConfig = {
|
||||
infiniteLoopProtection: true,
|
||||
hardReloadOnChange: true,
|
||||
|
@ -162,7 +160,7 @@ console.log(`
|
|||
# Generate examples start!
|
||||
`);
|
||||
|
||||
// delete the examples directories if they exists
|
||||
// Delete the examples directories if they exist
|
||||
console.log(`-------
|
||||
## Removing example folders...
|
||||
`);
|
||||
|
@ -170,7 +168,7 @@ await fs.rm('./examples/classic', {recursive: true, force: true});
|
|||
await fs.rm('./examples/classic-typescript', {recursive: true, force: true});
|
||||
await fs.rm('./examples/facebook', {recursive: true, force: true});
|
||||
|
||||
// get the list of all available templates
|
||||
// Get the list of all available templates
|
||||
console.log(`
|
||||
-------
|
||||
## Generate example folders...
|
||||
|
@ -187,7 +185,7 @@ console.log('Committing changes');
|
|||
shell.exec('git add examples');
|
||||
shell.exec("git commit -am 'update examples' --allow-empty");
|
||||
|
||||
// update starters
|
||||
// Update starters
|
||||
console.log(`
|
||||
-------
|
||||
# Updating starter repos and branches ...
|
||||
|
|
7
jest/snapshotPathNormalizer.ts
vendored
7
jest/snapshotPathNormalizer.ts
vendored
|
@ -90,7 +90,7 @@ function normalizePaths<T>(value: T): T {
|
|||
(val) => val.split(homeDirReal).join('<HOME_DIR>'),
|
||||
(val) => val.split(homeDir).join('<HOME_DIR>'),
|
||||
|
||||
// handle HOME_DIR nested inside TEMP_DIR
|
||||
// Handle HOME_DIR nested inside TEMP_DIR
|
||||
(val) =>
|
||||
val
|
||||
.split(`<TEMP_DIR>${path.sep + homeRelativeToTemp}`)
|
||||
|
@ -98,7 +98,7 @@ function normalizePaths<T>(value: T): T {
|
|||
(val) =>
|
||||
val
|
||||
.split(`<TEMP_DIR>${path.sep + homeRelativeToTempReal}`)
|
||||
.join('<HOME_DIR>'), // untested
|
||||
.join('<HOME_DIR>'),
|
||||
(val) =>
|
||||
val
|
||||
.split(`<TEMP_DIR>${path.sep + homeRealRelativeToTempReal}`)
|
||||
|
@ -106,7 +106,7 @@ function normalizePaths<T>(value: T): T {
|
|||
(val) =>
|
||||
val
|
||||
.split(`<TEMP_DIR>${path.sep + homeRealRelativeToTemp}`)
|
||||
.join('<HOME_DIR>'), // untested
|
||||
.join('<HOME_DIR>'),
|
||||
|
||||
// Replace the Docusaurus version with a stub
|
||||
(val) => val.split(version).join('<CURRENT_VERSION>'),
|
||||
|
@ -134,7 +134,6 @@ function normalizePaths<T>(value: T): T {
|
|||
}
|
||||
|
||||
function shouldUpdate(value: unknown) {
|
||||
// return true if value is different from normalized value
|
||||
return typeof value === 'string' && normalizePaths(value) !== value;
|
||||
}
|
||||
|
||||
|
|
|
@ -79,7 +79,7 @@ export default function plugin(options: PluginOptions = {}): Transformer {
|
|||
visit(root, 'heading', (child: Heading, index, parent) => {
|
||||
const value = toString(child);
|
||||
|
||||
// depth:1 headings are titles and not included in the TOC
|
||||
// depth: 1 headings are titles and not included in the TOC
|
||||
if (parent !== root || !value || child.depth < 2) {
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -103,7 +103,7 @@ async function getImageAbsolutePath(
|
|||
await ensureImageFileExist(imageFilePath, filePath);
|
||||
return imageFilePath;
|
||||
} else if (path.isAbsolute(imagePath)) {
|
||||
// absolute paths are expected to exist in the static folder
|
||||
// Absolute paths are expected to exist in the static folder.
|
||||
const possiblePaths = staticDirs.map((dir) => path.join(dir, imagePath));
|
||||
const imageFilePath = await findAsyncSequential(
|
||||
possiblePaths,
|
||||
|
@ -120,7 +120,7 @@ async function getImageAbsolutePath(
|
|||
}
|
||||
return imageFilePath;
|
||||
}
|
||||
// relative paths are resolved against the source file's folder
|
||||
// Relative paths are resolved against the source file's folder.
|
||||
const imageFilePath = path.join(
|
||||
path.dirname(filePath),
|
||||
decodeURIComponent(imagePath),
|
||||
|
|
|
@ -34,7 +34,9 @@ type Context = PluginOptions & {
|
|||
filePath: string;
|
||||
};
|
||||
|
||||
// transform the link node to a jsx link with a require() call
|
||||
/**
|
||||
* Transforms the link node to a JSX `<a>` element with a `require()` call.
|
||||
*/
|
||||
function toAssetRequireNode(node: Link, assetPath: string, filePath: string) {
|
||||
const jsxNode = node as Literal & Partial<Link>;
|
||||
let relativeAssetPath = posixPath(
|
||||
|
@ -106,7 +108,7 @@ async function getAssetAbsolutePath(
|
|||
|
||||
async function processLinkNode(node: Link, context: Context) {
|
||||
if (!node.url) {
|
||||
// try to improve error feedback
|
||||
// Try to improve error feedback
|
||||
// see https://github.com/facebook/docusaurus/issues/3309#issuecomment-690371675
|
||||
const title = node.title || (node.children[0] as Literal)?.value || '?';
|
||||
const line = node?.position?.start?.line || '?';
|
||||
|
|
|
@ -207,7 +207,7 @@ export function createConfigFile({
|
|||
>): VersionTwoConfig {
|
||||
const siteConfig = v1Config;
|
||||
const customConfigFields: {[key: string]: unknown} = {};
|
||||
// add fields that are unknown to v2 to customConfigFields
|
||||
// Add fields that are unknown to v2 to customConfigFields
|
||||
Object.keys(siteConfig).forEach((key) => {
|
||||
const knownFields = [
|
||||
'title',
|
||||
|
|
|
@ -152,9 +152,7 @@ declare module '@docusaurus/Link' {
|
|||
readonly href?: string;
|
||||
readonly autoAddBaseUrl?: boolean;
|
||||
|
||||
/**
|
||||
* escape hatch in case broken links check is annoying for a specific link
|
||||
*/
|
||||
/** Escape hatch in case broken links check doesn't make sense. */
|
||||
readonly 'data-noBrokenLinkCheck'?: boolean;
|
||||
};
|
||||
export default function Link(props: Props): JSX.Element;
|
||||
|
|
|
@ -96,8 +96,8 @@ function filterUnwantedRedirects(
|
|||
redirects: RedirectMetadata[],
|
||||
pluginContext: PluginContext,
|
||||
): RedirectMetadata[] {
|
||||
// we don't want to create twice the same redirect
|
||||
// that would lead to writing twice the same html redirection file
|
||||
// We don't want to create the same redirect twice, since that would lead to
|
||||
// writing the same html redirection file twice.
|
||||
Object.entries(_.groupBy(redirects, (redirect) => redirect.from)).forEach(
|
||||
([from, groupedFromRedirects]) => {
|
||||
if (groupedFromRedirects.length > 1) {
|
||||
|
|
|
@ -158,7 +158,6 @@ describe('blog plugin', () => {
|
|||
readingTime: 0.015,
|
||||
source: path.posix.join(
|
||||
'@site',
|
||||
// pluginPath,
|
||||
path.posix.join('i18n', 'en', 'docusaurus-plugin-content-blog'),
|
||||
'2018-12-14-Happy-First-Birthday-Slash.md',
|
||||
),
|
||||
|
@ -421,7 +420,7 @@ describe('blog plugin', () => {
|
|||
const blogPosts = await getBlogPosts(siteDir);
|
||||
const noDateSource = path.posix.join('@site', PluginPath, 'no date.md');
|
||||
const noDateSourceFile = path.posix.join(siteDir, PluginPath, 'no date.md');
|
||||
// we know the file exist and we know we have git
|
||||
// We know the file exists and we know we have git
|
||||
const result = getFileCommitDate(noDateSourceFile, {age: 'oldest'});
|
||||
const noDateSourceTime = result.date;
|
||||
const formattedDate = Intl.DateTimeFormat('en', {
|
||||
|
|
|
@ -13,7 +13,7 @@ function testValidate(options: Options) {
|
|||
return validateOptions({validate: normalizePluginOptions, options});
|
||||
}
|
||||
|
||||
// the type of remark/rehype plugins can be either function, object or array
|
||||
// The type of remark/rehype plugins can be either function, object or array
|
||||
const markdownPluginsFunctionStub = () => {};
|
||||
const markdownPluginsObjectStub = {};
|
||||
|
||||
|
|
|
@ -218,7 +218,7 @@ export default async function pluginContentBlog(
|
|||
routeBasePath,
|
||||
archiveBasePath,
|
||||
]);
|
||||
// creates a blog archive route
|
||||
// Create a blog archive route
|
||||
const archiveProp = await createData(
|
||||
`${docuHash(archiveUrl)}.json`,
|
||||
JSON.stringify({blogPosts}, null, 2),
|
||||
|
|
|
@ -111,7 +111,7 @@ const PluginOptionSchema = Joi.object<PluginOptions>({
|
|||
.default(DEFAULT_OPTIONS.feedOptions.type),
|
||||
title: Joi.string().allow(''),
|
||||
description: Joi.string().allow(''),
|
||||
// only add default value when user actually wants a feed (type is not null)
|
||||
// Only add default value when user actually wants a feed (type is not null)
|
||||
copyright: Joi.when('type', {
|
||||
is: Joi.any().valid(null),
|
||||
then: Joi.string().optional(),
|
||||
|
|
|
@ -849,7 +849,6 @@ describe('versioned site', () => {
|
|||
const {siteDir, context, options, version100} = await loadSite({
|
||||
options: {
|
||||
editUrl: 'https://github.com/facebook/docusaurus/edit/main/website',
|
||||
// editCurrentVersion: true,
|
||||
},
|
||||
});
|
||||
|
||||
|
|
|
@ -68,8 +68,8 @@ const createFakeActions = (contentDir: string) => {
|
|||
},
|
||||
};
|
||||
|
||||
// query by prefix, because files have a hash at the end
|
||||
// so it's not convenient to query by full filename
|
||||
// Query by prefix, because files have a hash at the end so it's not
|
||||
// convenient to query by full filename
|
||||
const getCreatedDataByPrefix = (prefix: string) => {
|
||||
const entry = Object.entries(dataContainer).find(([key]) =>
|
||||
key.startsWith(prefix),
|
||||
|
|
|
@ -23,7 +23,7 @@ const IgnoredNumberPrefixPatterns = [
|
|||
'00abc01-My Doc',
|
||||
'My 001- Doc',
|
||||
'My -001 Doc',
|
||||
// ignore common date-like patterns: https://github.com/facebook/docusaurus/issues/4640
|
||||
// Ignore common date-like patterns: https://github.com/facebook/docusaurus/issues/4640
|
||||
'2021-01-31 - Doc',
|
||||
'31-01-2021 - Doc',
|
||||
'2021_01_31 - Doc',
|
||||
|
@ -36,7 +36,7 @@ const IgnoredNumberPrefixPatterns = [
|
|||
'01-2021 - Doc',
|
||||
'01_2021 - Doc',
|
||||
'01.2021 - Doc',
|
||||
// date patterns without suffix
|
||||
// Date patterns without suffix
|
||||
'2021-01-31',
|
||||
'2021-01',
|
||||
'21-01-31',
|
||||
|
@ -49,7 +49,7 @@ const IgnoredNumberPrefixPatterns = [
|
|||
'01',
|
||||
'2021',
|
||||
'01',
|
||||
// ignore common versioning patterns: https://github.com/facebook/docusaurus/issues/4653
|
||||
// Ignore common versioning patterns: https://github.com/facebook/docusaurus/issues/4653
|
||||
'8.0',
|
||||
'8.0.0',
|
||||
'14.2.16',
|
||||
|
|
|
@ -15,7 +15,7 @@ import {
|
|||
import {GlobExcludeDefault} from '@docusaurus/utils';
|
||||
import type {Options} from '@docusaurus/plugin-content-docs';
|
||||
|
||||
// the type of remark/rehype plugins is function
|
||||
// The type of remark/rehype plugins can be function/object
|
||||
const markdownPluginsFunctionStub = () => {};
|
||||
const markdownPluginsObjectStub = {};
|
||||
|
||||
|
|
|
@ -57,7 +57,7 @@ describe('toTagDocListProp', () => {
|
|||
count: 2,
|
||||
label: tag.label,
|
||||
permalink: tag.permalink,
|
||||
items: [doc3, doc1], // docs sorted by title, ignore "id5" absence
|
||||
items: [doc3, doc1], // Docs sorted by title, ignore "id5" absence
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -226,7 +226,7 @@ describe('docsClientUtils', () => {
|
|||
],
|
||||
};
|
||||
|
||||
// shuffle, because order shouldn't matter
|
||||
// Shuffle, because order shouldn't matter
|
||||
const versions: GlobalVersion[] = _.shuffle([
|
||||
versionNext,
|
||||
version2,
|
||||
|
@ -355,7 +355,7 @@ describe('docsClientUtils', () => {
|
|||
],
|
||||
};
|
||||
|
||||
// shuffle, because order shouldn't matter
|
||||
// Shuffle, because order shouldn't matter
|
||||
const versions: GlobalVersion[] = _.shuffle([
|
||||
versionNext,
|
||||
version2,
|
||||
|
@ -395,7 +395,7 @@ describe('docsClientUtils', () => {
|
|||
latestVersionSuggestion: version2,
|
||||
});
|
||||
expect(getDocVersionSuggestions(data, '/docs/version1/doc2')).toEqual({
|
||||
latestDocSuggestion: undefined, // because /docs/version1/doc2 does not exist
|
||||
latestDocSuggestion: undefined, // Because /docs/version1/doc2 does not exist
|
||||
latestVersionSuggestion: version2,
|
||||
});
|
||||
});
|
||||
|
|
|
@ -156,15 +156,13 @@ function doProcessDocMetadata({
|
|||
parse_number_prefixes: parseNumberPrefixes = true,
|
||||
} = frontMatter;
|
||||
|
||||
// ex: api/plugins/myDoc -> myDoc
|
||||
// ex: myDoc -> myDoc
|
||||
// E.g. api/plugins/myDoc -> myDoc; myDoc -> myDoc
|
||||
const sourceFileNameWithoutExtension = path.basename(
|
||||
source,
|
||||
path.extname(source),
|
||||
);
|
||||
|
||||
// ex: api/plugins/myDoc -> api/plugins
|
||||
// ex: myDoc -> .
|
||||
// E.g. api/plugins/myDoc -> api/plugins; myDoc -> .
|
||||
const sourceDirName = path.dirname(source);
|
||||
|
||||
const {filename: unprefixedFileName, numberPrefix} = parseNumberPrefixes
|
||||
|
@ -347,7 +345,7 @@ export function addDocNavigation(
|
|||
}
|
||||
|
||||
const docsWithNavigation = docsBase.map(addNavData);
|
||||
// sort to ensure consistent output for tests
|
||||
// Sort to ensure consistent output for tests
|
||||
docsWithNavigation.sort((a, b) => a.id.localeCompare(b.id));
|
||||
return docsWithNavigation;
|
||||
}
|
||||
|
@ -434,7 +432,7 @@ export function getDocIds(doc: DocMetadataBase): [string, string] {
|
|||
return [doc.unversionedId, doc.id];
|
||||
}
|
||||
|
||||
// docs are indexed by both versioned and unversioned ids at the same time
|
||||
// Docs are indexed by both versioned and unversioned ids at the same time
|
||||
// TODO legacy retro-compatibility due to old versioned sidebars using
|
||||
// versioned doc ids ("id" should be removed & "versionedId" should be renamed
|
||||
// to "id")
|
||||
|
|
|
@ -20,12 +20,14 @@ import type {DocFrontMatter} from '@docusaurus/plugin-content-docs';
|
|||
// We use default values in code instead
|
||||
const DocFrontMatterSchema = Joi.object<DocFrontMatter>({
|
||||
id: Joi.string(),
|
||||
title: Joi.string().allow(''), // see https://github.com/facebook/docusaurus/issues/4591#issuecomment-822372398
|
||||
// See https://github.com/facebook/docusaurus/issues/4591#issuecomment-822372398
|
||||
title: Joi.string().allow(''),
|
||||
hide_title: Joi.boolean(),
|
||||
hide_table_of_contents: Joi.boolean(),
|
||||
keywords: Joi.array().items(Joi.string().required()),
|
||||
image: URISchema,
|
||||
description: Joi.string().allow(''), // see https://github.com/facebook/docusaurus/issues/4591#issuecomment-822372398
|
||||
// See https://github.com/facebook/docusaurus/issues/4591#issuecomment-822372398
|
||||
description: Joi.string().allow(''),
|
||||
slug: Joi.string(),
|
||||
sidebar_label: Joi.string(),
|
||||
sidebar_position: Joi.number(),
|
||||
|
|
|
@ -19,7 +19,7 @@ declare module '@docusaurus/plugin-content-docs' {
|
|||
* Custom callback for parsing number prefixes from file/folder names.
|
||||
*/
|
||||
export type NumberPrefixParser = (filename: string) => {
|
||||
/** file name without number prefix, without any other modification. */
|
||||
/** File name without number prefix, without any other modification. */
|
||||
filename: string;
|
||||
/** The number prefix. Can be float, integer, negative, or anything. */
|
||||
numberPrefix?: number;
|
||||
|
@ -621,9 +621,9 @@ declare module '@docusaurus/plugin-content-docs/client' {
|
|||
breadcrumbs: boolean;
|
||||
};
|
||||
export type DocVersionSuggestions = {
|
||||
/** suggest the latest version */
|
||||
/** Suggest the latest version */
|
||||
latestVersionSuggestion: GlobalVersion;
|
||||
/** suggest the same doc, in latest version (if exist) */
|
||||
/** Suggest the same doc, in latest version (if one exists) */
|
||||
latestDocSuggestion?: GlobalDoc;
|
||||
};
|
||||
|
||||
|
|
|
@ -139,11 +139,9 @@ export async function createVersionRoutes({
|
|||
|
||||
actions.addRoute({
|
||||
path: version.path,
|
||||
// allow matching /docs/* as well
|
||||
// Allow matching /docs/* since this is the wrapping route
|
||||
exact: false,
|
||||
// main docs component (DocPage)
|
||||
component: docLayoutComponent,
|
||||
// sub-routes for each doc
|
||||
routes: await createVersionSubRoutes(),
|
||||
modules: {
|
||||
versionMetadata: aliasedSource(versionMetadataPropPath),
|
||||
|
|
|
@ -76,9 +76,9 @@ Available doc IDs:
|
|||
return (
|
||||
// Doc at the root of the autogenerated sidebar dir
|
||||
doc.sourceDirName === autogenDir ||
|
||||
// autogen dir is . and doc is in subfolder
|
||||
// Autogen dir is . and doc is in subfolder
|
||||
autogenDir === '.' ||
|
||||
// autogen dir is not . and doc is in subfolder
|
||||
// Autogen dir is not . and doc is in subfolder
|
||||
// "api/myDoc" startsWith "api/" (note "api2/myDoc" is not included)
|
||||
doc.sourceDirName.startsWith(addTrailingSlash(autogenDir))
|
||||
);
|
||||
|
|
|
@ -22,7 +22,7 @@ function normalizeCategoryLink(
|
|||
params: SidebarProcessorParams,
|
||||
): SidebarItemCategoryLink | undefined {
|
||||
if (category.link?.type === 'generated-index') {
|
||||
// default slug logic can be improved
|
||||
// Default slug logic can be improved
|
||||
const getDefaultSlug = () =>
|
||||
`/category/${params.categoryLabelSlugger.slug(category.label)}`;
|
||||
const slug = category.link.slug ?? getDefaultSlug();
|
||||
|
|
|
@ -180,7 +180,6 @@ export type PropSidebarItemCategory = Expand<
|
|||
}
|
||||
>;
|
||||
|
||||
// we may want to use a union type in props instead of this generic link?
|
||||
export type PropSidebarItemLink = SidebarItemLink & {
|
||||
docId?: string;
|
||||
};
|
||||
|
@ -245,7 +244,7 @@ export type SidebarItemsGeneratorArgs = {
|
|||
/** The default category index matcher which you can override. */
|
||||
isCategoryIndex: CategoryIndexMatcher;
|
||||
/**
|
||||
* key is the path relative to the doc content directory, value is the
|
||||
* Key is the path relative to the doc content directory, value is the
|
||||
* category metadata file's content.
|
||||
*/
|
||||
categoriesMetadata: {[filePath: string]: CategoryMetadataFile};
|
||||
|
|
|
@ -53,50 +53,6 @@ function getNormalizedSidebarName({
|
|||
return rest.join('/');
|
||||
}
|
||||
|
||||
/*
|
||||
// Do we need to translate doc metadata?
|
||||
// It seems translating front matter labels is good enough
|
||||
function getDocTranslations(doc: DocMetadata): TranslationFileContent {
|
||||
return {
|
||||
[`${doc.unversionedId}.title`]: {
|
||||
message: doc.title,
|
||||
description: `The title for doc with id=${doc.unversionedId}`,
|
||||
},
|
||||
...(doc.sidebar_label
|
||||
? {
|
||||
[`${doc.unversionedId}.sidebar_label`]: {
|
||||
message: doc.sidebar_label,
|
||||
description:
|
||||
`The sidebar label for doc with id=${doc.unversionedId}`,
|
||||
},
|
||||
}
|
||||
: undefined),
|
||||
};
|
||||
}
|
||||
function translateDoc(
|
||||
doc: DocMetadata,
|
||||
docsTranslations: TranslationFileContent,
|
||||
): DocMetadata {
|
||||
return {
|
||||
...doc,
|
||||
title: docsTranslations[`${doc.unversionedId}.title`]?.message ?? doc.title,
|
||||
sidebar_label:
|
||||
docsTranslations[`${doc.unversionedId}.sidebar_label`]?.message ??
|
||||
doc.sidebar_label,
|
||||
};
|
||||
}
|
||||
|
||||
function getDocsTranslations(version: LoadedVersion): TranslationFileContent {
|
||||
return mergeTranslations(version.docs.map(getDocTranslations));
|
||||
}
|
||||
function translateDocs(
|
||||
docs: DocMetadata[],
|
||||
docsTranslations: TranslationFileContent,
|
||||
): DocMetadata[] {
|
||||
return docs.map((doc) => translateDoc(doc, docsTranslations));
|
||||
}
|
||||
*/
|
||||
|
||||
function getSidebarTranslationFileContent(
|
||||
sidebar: Sidebar,
|
||||
sidebarName: string,
|
||||
|
@ -252,17 +208,10 @@ function getVersionTranslationFiles(version: LoadedVersion): TranslationFile[] {
|
|||
const sidebarsTranslations: TranslationFileContent =
|
||||
getSidebarsTranslations(version);
|
||||
|
||||
// const docsTranslations: TranslationFileContent =
|
||||
// getDocsTranslations(version);
|
||||
|
||||
return [
|
||||
{
|
||||
path: getVersionFileName(version.versionName),
|
||||
content: mergeTranslations([
|
||||
versionTranslations,
|
||||
sidebarsTranslations,
|
||||
// docsTranslations,
|
||||
]),
|
||||
content: mergeTranslations([versionTranslations, sidebarsTranslations]),
|
||||
},
|
||||
];
|
||||
}
|
||||
|
@ -276,7 +225,6 @@ function translateVersion(
|
|||
...version,
|
||||
label: versionTranslations['version.label']?.message ?? version.label,
|
||||
sidebars: translateSidebars(version, versionTranslations),
|
||||
// docs: translateDocs(version.docs, versionTranslations),
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ export type SourceToPermalink = {
|
|||
};
|
||||
|
||||
export type VersionTag = Tag & {
|
||||
/** all doc ids having this tag. */
|
||||
/** All doc ids having this tag. */
|
||||
docIds: string[];
|
||||
};
|
||||
export type VersionTags = {
|
||||
|
|
|
@ -38,7 +38,7 @@ function PluginContent({
|
|||
<h3>{pluginName}</h3>
|
||||
<div>
|
||||
{Object.entries(pluginContent)
|
||||
// filter plugin instances with no content
|
||||
// Filter plugin instances with no content
|
||||
.filter(([, pluginInstanceContent]) => !!pluginInstanceContent)
|
||||
.map(([pluginId, pluginInstanceContent]) => (
|
||||
<PluginInstanceContent
|
||||
|
@ -58,7 +58,7 @@ export default function DebugContent({allContent}: Props): JSX.Element {
|
|||
<h2>Plugin content</h2>
|
||||
<div>
|
||||
{Object.entries(allContent)
|
||||
// filter plugins with no content
|
||||
// Filter plugins with no content
|
||||
.filter(([, pluginContent]) =>
|
||||
Object.values(pluginContent).some(
|
||||
(instanceContent) => !!instanceContent,
|
||||
|
|
|
@ -10,7 +10,7 @@ import BrowserOnly from '@docusaurus/BrowserOnly';
|
|||
import type {Props} from '@theme/DebugJsonView';
|
||||
import type {ReactJsonViewProps} from 'react-json-view';
|
||||
|
||||
// avoids "react-json-view" to display "root"
|
||||
// Avoids "react-json-view" to display "root"
|
||||
const RootName = null;
|
||||
|
||||
// Seems ReactJson does not work with SSR
|
||||
|
|
|
@ -59,7 +59,8 @@ export default function pluginIdealImage(
|
|||
{
|
||||
loader: require.resolve('@docusaurus/responsive-loader'),
|
||||
options: {
|
||||
emitFile: !isServer, // don't emit for server-side rendering
|
||||
// Don't emit for server-side rendering
|
||||
emitFile: !isServer,
|
||||
// eslint-disable-next-line global-require
|
||||
adapter: require('@docusaurus/responsive-loader/sharp'),
|
||||
name: 'assets/ideal-img/[name].[hash:hex:7].[width].[ext]',
|
||||
|
|
|
@ -40,7 +40,7 @@ function getMessage(icon: IconKey, state: State) {
|
|||
description: 'When the full-scale image is loading',
|
||||
});
|
||||
case 'load': {
|
||||
// we can show `alt` here
|
||||
// We can show `alt` here
|
||||
const {pickedSrc} = state;
|
||||
const {size} = pickedSrc;
|
||||
const sizeMessage = size ? ` (${bytesToSize(size)})` : '';
|
||||
|
|
|
@ -150,7 +150,7 @@ export default function pluginPWA(
|
|||
optimization: {
|
||||
splitChunks: false,
|
||||
minimize: !debug,
|
||||
// see https://developers.google.com/web/tools/workbox/guides/using-bundlers#webpack
|
||||
// See https://developers.google.com/web/tools/workbox/guides/using-bundlers#webpack
|
||||
minimizer: debug
|
||||
? []
|
||||
: [
|
||||
|
@ -161,7 +161,8 @@ export default function pluginPWA(
|
|||
},
|
||||
plugins: [
|
||||
new webpack.EnvironmentPlugin({
|
||||
PWA_SW_CUSTOM: swCustom || '', // fallback value required with Webpack 5
|
||||
// Fallback value required with Webpack 5
|
||||
PWA_SW_CUSTOM: swCustom || '',
|
||||
}),
|
||||
new LogPlugin({
|
||||
name: 'Service Worker',
|
||||
|
@ -192,7 +193,7 @@ export default function pluginPWA(
|
|||
// @ts-expect-error: internal API?
|
||||
...(injectManifest.globPatterns ?? []),
|
||||
],
|
||||
// those attributes are not overrideable
|
||||
// Those attributes are not overrideable
|
||||
swDest,
|
||||
swSrc: swDest,
|
||||
globDirectory: props.outDir,
|
||||
|
|
|
@ -81,6 +81,8 @@ declare module '@docusaurus/plugin-pwa' {
|
|||
*/
|
||||
swRegister: string | false;
|
||||
};
|
||||
|
||||
export type Options = Partial<PluginOptions>;
|
||||
}
|
||||
|
||||
declare module '@theme/PwaReloadPopup' {
|
||||
|
|
|
@ -16,7 +16,7 @@ const PWA_OFFLINE_MODE_ACTIVATION_STRATEGIES =
|
|||
const PWA_DEBUG = process.env.PWA_DEBUG;
|
||||
/* eslint-enable prefer-destructuring */
|
||||
|
||||
const debug = PWA_DEBUG; // shortcut
|
||||
const debug = PWA_DEBUG; // Shortcut
|
||||
|
||||
const MAX_MOBILE_WIDTH = 940;
|
||||
|
||||
|
@ -101,7 +101,7 @@ async function getActiveStrategies() {
|
|||
return isActive ? strategyName : undefined;
|
||||
}),
|
||||
);
|
||||
return activeStrategies.filter(Boolean); // remove undefined values
|
||||
return activeStrategies.filter(Boolean);
|
||||
}
|
||||
|
||||
async function isOfflineModeEnabled() {
|
||||
|
|
|
@ -18,7 +18,7 @@ function parseSwParams() {
|
|||
return params;
|
||||
}
|
||||
|
||||
// doc advises against dynamic imports in SW
|
||||
// Doc advises against dynamic imports in SW
|
||||
// https://developers.google.com/web/tools/workbox/guides/using-bundlers#code_splitting_and_dynamic_imports
|
||||
// https://twitter.com/sebastienlorber/status/1280155204575518720
|
||||
// but looks it's working fine as it's inlined by webpack, need to double check
|
||||
|
@ -73,7 +73,8 @@ function getPossibleURLs(url) {
|
|||
// eslint-disable-next-line no-underscore-dangle
|
||||
const precacheManifest = self.__WB_MANIFEST;
|
||||
const controller = new PrecacheController({
|
||||
fallbackToNetwork: true, // safer to turn this true?
|
||||
// Safer to turn this true?
|
||||
fallbackToNetwork: true,
|
||||
});
|
||||
|
||||
if (params.offlineMode) {
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
*/
|
||||
|
||||
import remark from 'remark';
|
||||
// import from the transpiled lib because Babel can't transpile `export =`
|
||||
// Import from the transpiled lib because Babel can't transpile `export =`
|
||||
// TODO change to `../index` after migrating to ESM
|
||||
import npm2yarn from '../../lib/index';
|
||||
import vfile from 'to-vfile';
|
||||
|
|
|
@ -125,7 +125,7 @@ describe('getTranslationFiles and translateThemeConfig isomorphism', () => {
|
|||
verifyIsomorphism(ThemeConfigSampleSimpleFooter);
|
||||
});
|
||||
|
||||
// undefined footer should not make the translation code crash
|
||||
// Undefined footer should not make the translation code crash
|
||||
// See https://github.com/facebook/docusaurus/issues/3936
|
||||
it('is verified for sample without footer', () => {
|
||||
verifyIsomorphism({...ThemeConfigSample, footer: undefined});
|
||||
|
|
|
@ -30,7 +30,7 @@ export default function getSwizzleConfig(): SwizzleConfig {
|
|||
},
|
||||
DocSidebar: {
|
||||
actions: {
|
||||
eject: 'unsafe', // too much technical code in sidebar, not very safe atm
|
||||
eject: 'unsafe', // Too much technical code in sidebar, not very safe atm
|
||||
wrap: 'safe',
|
||||
},
|
||||
description: 'The sidebar component on docs pages',
|
||||
|
@ -234,7 +234,7 @@ export default function getSwizzleConfig(): SwizzleConfig {
|
|||
'prism-include-languages': {
|
||||
actions: {
|
||||
eject: 'safe',
|
||||
wrap: 'forbidden', // not a component!
|
||||
wrap: 'forbidden', // Not a component!
|
||||
},
|
||||
description:
|
||||
'The Prism languages to include for code block syntax highlighting. Meant to be ejected.',
|
||||
|
|
|
@ -1022,8 +1022,8 @@ declare module '@theme/TOCItems' {
|
|||
declare module '@theme/TOC' {
|
||||
import type {TOCItem} from '@docusaurus/types';
|
||||
|
||||
// minHeadingLevel only exists as a per-doc option, and won't have a default
|
||||
// set by Joi. See TOC, TOCInline, TOCCollapsible for examples
|
||||
// `minHeadingLevel` only comes from doc/post front matter, and won't have a
|
||||
// default set by Joi. See TOC, TOCInline, TOCCollapsible for examples.
|
||||
export interface Props {
|
||||
readonly toc: readonly TOCItem[];
|
||||
readonly minHeadingLevel?: number;
|
||||
|
|
|
@ -111,8 +111,8 @@ export default function DocSidebarItemCategory({
|
|||
const isCurrentPage = isSamePath(href, activePath);
|
||||
|
||||
const {collapsed, setCollapsed} = useCollapsible({
|
||||
// active categories are always initialized as expanded
|
||||
// the default (item.collapsed) is only used for non-active categories
|
||||
// Active categories are always initialized as expanded. The default
|
||||
// (`item.collapsed`) is only used for non-active categories.
|
||||
initialState: () => {
|
||||
if (!collapsible) {
|
||||
return false;
|
||||
|
|
|
@ -17,12 +17,7 @@ function DocSidebarItems({items, ...props}: Props): JSX.Element {
|
|||
return (
|
||||
<DocSidebarItemsExpandedStateProvider>
|
||||
{items.map((item, index) => (
|
||||
<DocSidebarItem
|
||||
key={index} // sidebar is static, the index does not change
|
||||
item={item}
|
||||
index={index}
|
||||
{...props}
|
||||
/>
|
||||
<DocSidebarItem key={index} item={item} index={index} {...props} />
|
||||
))}
|
||||
</DocSidebarItemsExpandedStateProvider>
|
||||
);
|
||||
|
|
|
@ -136,8 +136,8 @@ function DocVersionBannerEnabled({
|
|||
const {latestDocSuggestion, latestVersionSuggestion} =
|
||||
useDocVersionSuggestions(pluginId);
|
||||
|
||||
// try to link to same doc in latest version (not always possible)
|
||||
// fallback to main doc of latest version
|
||||
// Try to link to same doc in latest version (not always possible), falling
|
||||
// back to main doc of latest version
|
||||
const latestVersionSuggestedDoc =
|
||||
latestDocSuggestion ?? getVersionMainDoc(latestVersionSuggestion);
|
||||
|
||||
|
|
|
@ -13,13 +13,13 @@ import {useThemeConfig} from '@docusaurus/theme-common';
|
|||
|
||||
import styles from './styles.module.css';
|
||||
|
||||
function AnchorHeading({as: As, id, ...props}: Props) {
|
||||
export default function Heading({as: As, id, ...props}: Props): JSX.Element {
|
||||
const {
|
||||
navbar: {hideOnScroll},
|
||||
} = useThemeConfig();
|
||||
|
||||
if (!id) {
|
||||
return <As {...props} />;
|
||||
// H1 headings do not need an id because they don't appear in the TOC.
|
||||
if (As === 'h1' || !id) {
|
||||
return <As {...props} id={undefined} />;
|
||||
}
|
||||
|
||||
return (
|
||||
|
@ -46,17 +46,3 @@ function AnchorHeading({as: As, id, ...props}: Props) {
|
|||
</As>
|
||||
);
|
||||
}
|
||||
|
||||
export default function Heading({as, ...props}: Props): JSX.Element {
|
||||
if (as === 'h1') {
|
||||
return (
|
||||
<h1
|
||||
{...props}
|
||||
id={undefined} // h1 headings do not need an id because they don't appear in the TOC
|
||||
>
|
||||
{props.children}
|
||||
</h1>
|
||||
);
|
||||
}
|
||||
return <AnchorHeading as={as} {...props} />;
|
||||
}
|
||||
|
|
|
@ -27,7 +27,7 @@ export default function Layout(props: Props): JSX.Element {
|
|||
children,
|
||||
noFooter,
|
||||
wrapperClassName,
|
||||
// not really layout-related, but kept for convenience/retro-compatibility
|
||||
// Not really layout-related, but kept for convenience/retro-compatibility
|
||||
title,
|
||||
description,
|
||||
} = props;
|
||||
|
|
|
@ -109,16 +109,16 @@ export default function SiteMetadata(): JSX.Element {
|
|||
|
||||
<SearchMetadata tag={DEFAULT_SEARCH_TAG} locale={currentLocale} />
|
||||
|
||||
<Head
|
||||
// it's important to have an additional <Head> element here,
|
||||
// as it allows react-helmet to override values set in previous <Head>
|
||||
// ie we can override default metadata such as "twitter:card"
|
||||
// In same Head, the same meta would appear twice instead of overriding
|
||||
// See react-helmet doc
|
||||
>
|
||||
{/*
|
||||
It's important to have an additional <Head> element here, as it allows
|
||||
react-helmet to override default metadata values set in previous <Head>
|
||||
like "twitter:card". In same Head, the same meta would appear twice
|
||||
instead of overriding.
|
||||
*/}
|
||||
<Head>
|
||||
{/* Yes, "metadatum" is the grammatically correct term */}
|
||||
{metadata.map((metadatum, i) => (
|
||||
<meta key={`metadata_${i}`} {...metadatum} />
|
||||
<meta key={i} {...metadatum} />
|
||||
))}
|
||||
</Head>
|
||||
</>
|
||||
|
|
|
@ -100,7 +100,7 @@ const HtmlNavbarItemSchema = Joi.object({
|
|||
});
|
||||
|
||||
const itemWithType = (type: string | undefined) => {
|
||||
// because equal(undefined) is not supported :/
|
||||
// Because equal(undefined) is not supported :/
|
||||
const typeSchema = type
|
||||
? Joi.string().required().equal(type)
|
||||
: Joi.string().forbidden();
|
||||
|
@ -241,7 +241,6 @@ const ColorModeSchema = Joi.object({
|
|||
}),
|
||||
}).default(DEFAULT_COLOR_MODE_CONFIG);
|
||||
|
||||
// schema can probably be improved
|
||||
const HtmlMetadataSchema = Joi.object({
|
||||
id: Joi.string(),
|
||||
name: Joi.string(),
|
||||
|
|
|
@ -224,6 +224,8 @@ function CollapsibleBase({
|
|||
|
||||
function CollapsibleLazy({collapsed, ...props}: CollapsibleBaseProps) {
|
||||
const [mounted, setMounted] = useState(!collapsed);
|
||||
// Updated in effect so that first expansion transition can work
|
||||
const [lazyCollapsed, setLazyCollapsed] = useState(collapsed);
|
||||
|
||||
useLayoutEffect(() => {
|
||||
if (!collapsed) {
|
||||
|
@ -231,8 +233,6 @@ function CollapsibleLazy({collapsed, ...props}: CollapsibleBaseProps) {
|
|||
}
|
||||
}, [collapsed]);
|
||||
|
||||
// lazyCollapsed updated in effect so that first expansion transition can work
|
||||
const [lazyCollapsed, setLazyCollapsed] = useState(collapsed);
|
||||
useLayoutEffect(() => {
|
||||
if (mounted) {
|
||||
setLazyCollapsed(collapsed);
|
||||
|
|
|
@ -87,7 +87,8 @@ export function Details({
|
|||
setOpen(true);
|
||||
} else {
|
||||
setCollapsed(true);
|
||||
// setOpen(false); // Don't do this, it breaks close animation!
|
||||
// Don't do this, it breaks close animation!
|
||||
// setOpen(false);
|
||||
}
|
||||
}}>
|
||||
{summary}
|
||||
|
|
|
@ -72,7 +72,7 @@ function useContextValue(): ContextValue {
|
|||
|
||||
let viewedId = IdStorage.get();
|
||||
|
||||
// retrocompatibility due to spelling mistake of default id
|
||||
// Retrocompatibility due to spelling mistake of default id
|
||||
// see https://github.com/facebook/docusaurus/issues/3338
|
||||
// cSpell:ignore annoucement
|
||||
if (viewedId === 'annoucement-bar') {
|
||||
|
|
|
@ -58,9 +58,9 @@ function useContextValue(): ContextValue {
|
|||
useHistoryPopHandler(() => {
|
||||
if (shown) {
|
||||
setShown(false);
|
||||
// Should we prevent the navigation here?
|
||||
// Prevent pop navigation; seems desirable enough
|
||||
// See https://github.com/facebook/docusaurus/pull/5462#issuecomment-911699846
|
||||
return false; // prevent pop navigation
|
||||
return false;
|
||||
}
|
||||
return undefined;
|
||||
});
|
||||
|
|
|
@ -81,8 +81,8 @@ function getActiveAnchor(
|
|||
// https://github.com/facebook/docusaurus/issues/5318
|
||||
return anchors[anchors.indexOf(nextVisibleAnchor) - 1] ?? null;
|
||||
}
|
||||
// no anchor under viewport top? (ie we are at the bottom of the page)
|
||||
// => highlight the last anchor found
|
||||
// No anchor under viewport top (i.e. we are at the bottom of the page),
|
||||
// highlight the last anchor found
|
||||
return anchors[anchors.length - 1] ?? null;
|
||||
}
|
||||
|
||||
|
@ -140,7 +140,7 @@ export function useTOCHighlight(config: TOCHighlightConfig | undefined): void {
|
|||
|
||||
useEffect(() => {
|
||||
if (!config) {
|
||||
// no-op, highlighting is disabled
|
||||
// No-op, highlighting is disabled
|
||||
return () => {};
|
||||
}
|
||||
|
||||
|
|
|
@ -30,14 +30,14 @@ const magicCommentDirectives = [
|
|||
];
|
||||
|
||||
function getCommentPattern(languages: CommentType[]) {
|
||||
// to be more reliable, the opening and closing comment must match
|
||||
// To be more reliable, the opening and closing comment must match
|
||||
const commentPattern = languages
|
||||
.map((lang) => {
|
||||
const {start, end} = commentPatterns[lang];
|
||||
return `(?:${start}\\s*(${magicCommentDirectives.join('|')})\\s*${end})`;
|
||||
})
|
||||
.join('|');
|
||||
// white space is allowed, but otherwise it should be on it's own line
|
||||
// White space is allowed, but otherwise it should be on it's own line
|
||||
return new RegExp(`^\\s*(?:${commentPattern})\\s*$`);
|
||||
}
|
||||
|
||||
|
@ -70,7 +70,7 @@ function getAllMagicCommentDirectiveStyles(lang: string) {
|
|||
return getCommentPattern(['html', 'jsx', 'bash']);
|
||||
|
||||
default:
|
||||
// all comment types
|
||||
// All comment types
|
||||
return getCommentPattern(Object.keys(commentPatterns) as CommentType[]);
|
||||
}
|
||||
}
|
||||
|
@ -139,16 +139,15 @@ export function parseLines(
|
|||
return {highlightLines: [], code};
|
||||
}
|
||||
const directiveRegex = getAllMagicCommentDirectiveStyles(language);
|
||||
// go through line by line
|
||||
// Go through line by line
|
||||
const lines = code.split('\n');
|
||||
let highlightBlockStart: number;
|
||||
let highlightRange = '';
|
||||
// loop through lines
|
||||
for (let lineNumber = 0; lineNumber < lines.length; ) {
|
||||
const line = lines[lineNumber]!;
|
||||
const match = line.match(directiveRegex);
|
||||
if (!match) {
|
||||
// lines without directives are unchanged
|
||||
// Lines without directives are unchanged
|
||||
lineNumber += 1;
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -92,13 +92,8 @@ export function findFirstCategoryLink(
|
|||
if (categoryLink) {
|
||||
return categoryLink;
|
||||
}
|
||||
} else if (subItem.type === 'html') {
|
||||
// skip
|
||||
} else {
|
||||
throw new Error(
|
||||
`Unexpected category item type for ${JSON.stringify(subItem)}`,
|
||||
);
|
||||
}
|
||||
// Could be "html" items
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
@ -271,7 +266,7 @@ export function useLayoutDoc(
|
|||
const isDraft = versions
|
||||
.flatMap((version) => version.draftIds)
|
||||
.includes(docId);
|
||||
// drafts should be silently filtered instead of throwing
|
||||
// Drafts should be silently filtered instead of throwing
|
||||
if (isDraft) {
|
||||
return null;
|
||||
}
|
||||
|
|
|
@ -30,11 +30,11 @@ function treeifyTOC(flatTOC: readonly TOCItem[]): TOCTreeNode[] {
|
|||
const prevIndexForLevel = Array(7).fill(-1);
|
||||
|
||||
headings.forEach((curr, currIndex) => {
|
||||
// take the last seen index for each ancestor level. the highest
|
||||
// index will be the direct ancestor of the current heading.
|
||||
// Take the last seen index for each ancestor level. the highest index will
|
||||
// be the direct ancestor of the current heading.
|
||||
const ancestorLevelIndexes = prevIndexForLevel.slice(2, curr.level);
|
||||
curr.parentIndex = Math.max(...ancestorLevelIndexes);
|
||||
// mark that curr.level was last seen at the current index
|
||||
// Mark that curr.level was last seen at the current index.
|
||||
prevIndexForLevel[curr.level] = currIndex;
|
||||
});
|
||||
|
||||
|
|
|
@ -11,7 +11,7 @@ import {useContextualSearchFilters} from '@docusaurus/theme-common';
|
|||
export function useAlgoliaContextualFacetFilters(): [string, string[]] {
|
||||
const {locale, tags} = useContextualSearchFilters();
|
||||
|
||||
// seems safe to convert locale->language, see AlgoliaSearchMetadata comment
|
||||
// Seems safe to convert locale->language, see AlgoliaSearchMetadata comment
|
||||
const languageFilter = `language:${locale}`;
|
||||
|
||||
const tagsFilter = tags.map((tag) => `docusaurus_tag:${tag}`);
|
||||
|
|
|
@ -99,7 +99,7 @@ function DocSearch({
|
|||
: // ... or use config facetFilters
|
||||
configFacetFilters;
|
||||
|
||||
// we let user override default searchParameters if he wants to
|
||||
// We let user override default searchParameters if she wants to
|
||||
const searchParameters: DocSearchProps['searchParameters'] = {
|
||||
...props.searchParameters,
|
||||
facetFilters,
|
||||
|
|
|
@ -12,7 +12,7 @@ import type {
|
|||
} from '@docusaurus/types';
|
||||
|
||||
export const DEFAULT_CONFIG = {
|
||||
// enabled by default, as it makes sense in most cases
|
||||
// Enabled by default, as it makes sense in most cases
|
||||
// see also https://github.com/facebook/docusaurus/issues/5880
|
||||
contextualSearch: true,
|
||||
|
||||
|
|
|
@ -16,18 +16,19 @@ function getDefaultLocalesDirPath(): string {
|
|||
// Return an ordered list of locales we should try
|
||||
export function codeTranslationLocalesToTry(locale: string): string[] {
|
||||
const intlLocale = new Intl.Locale(locale);
|
||||
// if locale is just a simple language like "pt", we want to fallback to pt-BR
|
||||
// (not pt-PT!) See https://github.com/facebook/docusaurus/pull/4536#issuecomment-810088783
|
||||
const maximizedLocale = intlLocale.maximize(); // pt-Latn-BR
|
||||
// If locale is just a simple language like "pt", we want to fallback to
|
||||
// "pt-BR" (not "pt-PT"!)
|
||||
// See https://github.com/facebook/docusaurus/pull/4536#issuecomment-810088783
|
||||
const maximizedLocale = intlLocale.maximize(); // "pt-Latn-BR"
|
||||
return [
|
||||
// May be "zh", "zh-CN", "zh-Hans", "zh-cn", or anything: very likely to be
|
||||
// unresolved except for simply locales
|
||||
locale,
|
||||
// zh-CN / pt-BR
|
||||
// "zh-CN" / "pt-BR"
|
||||
`${maximizedLocale.language}-${maximizedLocale.region}`,
|
||||
// zh-Hans / pt-Latn
|
||||
// "zh-Hans" / "pt-Latn"
|
||||
`${maximizedLocale.language}-${maximizedLocale.script}`,
|
||||
// zh / pt
|
||||
// "zh" / "pt"
|
||||
maximizedLocale.language!,
|
||||
];
|
||||
}
|
||||
|
|
4
packages/docusaurus-types/src/index.d.ts
vendored
4
packages/docusaurus-types/src/index.d.ts
vendored
|
@ -313,9 +313,9 @@ export type Plugin<Content = unknown> = {
|
|||
name: string;
|
||||
loadContent?: () => Promise<Content> | Content;
|
||||
contentLoaded?: (args: {
|
||||
/** the content loaded by this plugin instance */
|
||||
/** The content loaded by this plugin instance */
|
||||
content: Content; //
|
||||
/** content loaded by ALL the plugins */
|
||||
/** Content loaded by ALL the plugins */
|
||||
allContent: AllContent;
|
||||
actions: PluginContentLoadedActions;
|
||||
}) => Promise<void> | void;
|
||||
|
|
|
@ -156,7 +156,6 @@ describe('localizePath', () => {
|
|||
currentLocale: 'en',
|
||||
localeConfigs: {},
|
||||
},
|
||||
// options: {localizePath: true},
|
||||
}),
|
||||
).toBe('/baseUrl/');
|
||||
});
|
||||
|
@ -172,7 +171,6 @@ describe('localizePath', () => {
|
|||
currentLocale: 'en',
|
||||
localeConfigs: {},
|
||||
},
|
||||
// options: {localizePath: true},
|
||||
}),
|
||||
).toBe('/baseUrl/');
|
||||
});
|
||||
|
|
|
@ -38,7 +38,7 @@ describe('createExcerpt', () => {
|
|||
Nunc porttitor libero nec vulputate venenatis. Nam nec rhoncus mauris. Morbi tempus est et nibh maximus, tempus venenatis arcu lobortis.
|
||||
`),
|
||||
).toBe(
|
||||
// h1 title is skipped on purpose, because we don't want the page to have
|
||||
// H1 title is skipped on purpose, because we don't want the page to have
|
||||
// SEO metadata title === description
|
||||
'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum ex urna, molestie et sagittis ut, varius ac justo.',
|
||||
);
|
||||
|
@ -56,7 +56,7 @@ describe('createExcerpt', () => {
|
|||
Nunc porttitor libero nec vulputate venenatis. Nam nec rhoncus mauris. Morbi tempus est et nibh maximus, tempus venenatis arcu lobortis.
|
||||
`),
|
||||
).toBe(
|
||||
// h1 title is skipped on purpose, because we don't want the page to have
|
||||
// H1 title is skipped on purpose, because we don't want the page to have
|
||||
// SEO metadata title === description
|
||||
'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum ex urna, molestie et sagittis ut, varius ac justo.',
|
||||
);
|
||||
|
@ -377,7 +377,7 @@ Lorem Ipsum
|
|||
|
||||
`;
|
||||
|
||||
// remove the useless line breaks? Does not matter too much
|
||||
// Remove the useless line breaks? Does not matter too much
|
||||
expect(parseMarkdownContentTitle(markdown)).toEqual({
|
||||
content: markdown,
|
||||
contentTitle: 'Markdown Title',
|
||||
|
|
|
@ -112,7 +112,7 @@ export function getFileCommitDate(
|
|||
const result = shell.exec(
|
||||
`git log ${extraArgs} ${formatArg} -- "${path.basename(file)}"`,
|
||||
{
|
||||
// cwd is important, see: https://github.com/facebook/docusaurus/pull/5048
|
||||
// Setting cwd is important, see: https://github.com/facebook/docusaurus/pull/5048
|
||||
cwd: path.dirname(file),
|
||||
silent: true,
|
||||
},
|
||||
|
|
|
@ -61,7 +61,7 @@ export function getPluginI18nPath({
|
|||
return path.join(
|
||||
siteDir,
|
||||
I18N_DIR_NAME,
|
||||
// namespace first by locale: convenient to work in a single folder for a
|
||||
// Namespace first by locale: convenient to work in a single folder for a
|
||||
// translator
|
||||
locale,
|
||||
// Make it convenient to use for single-instance
|
||||
|
|
|
@ -21,8 +21,10 @@ const isWindows = () => process.platform === 'win32';
|
|||
export const isNameTooLong = (str: string): boolean =>
|
||||
// Not entirely correct: we can't assume FS from OS. But good enough?
|
||||
isMacOs() || isWindows()
|
||||
? str.length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_CHARS // macOS (APFS) and Windows (NTFS) filename length limit (255 chars)
|
||||
: Buffer.from(str).length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_BYTES; // Other (255 bytes)
|
||||
? // Windows (NTFS) and macOS (APFS) filename length limit (255 chars)
|
||||
str.length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_CHARS
|
||||
: // Other (255 bytes)
|
||||
Buffer.from(str).length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_BYTES;
|
||||
|
||||
export function shortName(str: string): string {
|
||||
if (isMacOs() || isWindows()) {
|
||||
|
|
|
@ -24,7 +24,7 @@ function normalizeFrontMatterTag(
|
|||
|
||||
// TODO maybe make ensure the permalink is valid url path?
|
||||
function normalizeTagPermalink(permalink: string): string {
|
||||
// note: we always apply tagsPath on purpose. For versioned docs, v1/doc.md
|
||||
// Note: we always apply tagsPath on purpose. For versioned docs, v1/doc.md
|
||||
// and v2/doc.md tags with custom permalinks don't lead to the same created
|
||||
// page. tagsPath is different for each doc version
|
||||
return normalizeUrl([tagsPath, permalink]);
|
||||
|
|
|
@ -158,7 +158,6 @@ export function isValidPathname(str: string): boolean {
|
|||
return false;
|
||||
}
|
||||
try {
|
||||
// weird, but is there a better way?
|
||||
const parsedPathname = new URL(str, 'https://domain.com').pathname;
|
||||
return parsedPathname === str || parsedPathname === encodeURI(str);
|
||||
} catch {
|
||||
|
|
|
@ -37,11 +37,10 @@ type FileLoaderUtils = {
|
|||
* Inspired by https://github.com/gatsbyjs/gatsby/blob/8e6e021014da310b9cc7d02e58c9b3efe938c665/packages/gatsby/src/utils/webpack-utils.ts#L447
|
||||
*/
|
||||
export function getFileLoaderUtils(): FileLoaderUtils {
|
||||
// files/images < urlLoaderLimit will be inlined as base64 strings directly in
|
||||
// Files/images < urlLoaderLimit will be inlined as base64 strings directly in
|
||||
// the html
|
||||
const urlLoaderLimit = WEBPACK_URL_LOADER_LIMIT;
|
||||
|
||||
// defines the path/pattern of the assets handled by webpack
|
||||
const fileLoaderFileName = (folder: AssetFolder) =>
|
||||
path.posix.join(
|
||||
OUTPUT_STATIC_ASSETS_DIR_NAME,
|
||||
|
|
|
@ -45,7 +45,6 @@ export default async function beforeCli() {
|
|||
// Check is in background so it's fine to use a small value like 1h
|
||||
// Use 0 for debugging
|
||||
updateCheckInterval: 1000 * 60 * 60,
|
||||
// updateCheckInterval: 0
|
||||
});
|
||||
|
||||
// Hacky way to ensure we check for updates on first run
|
||||
|
@ -124,7 +123,7 @@ export default async function beforeCli() {
|
|||
console.log(docusaurusUpdateMessage);
|
||||
}
|
||||
|
||||
// notify user if node version needs to be updated
|
||||
// Notify user if node version needs to be updated
|
||||
if (!semver.satisfies(process.version, requiredVersion)) {
|
||||
logger.error('Minimum Node.js version not met :(');
|
||||
logger.info`You are using Node.js number=${process.version}, Requirement: Node.js number=${requiredVersion}.`;
|
||||
|
|
|
@ -11,12 +11,11 @@ module.exports = {
|
|||
'error',
|
||||
{
|
||||
patterns: [
|
||||
// prevent importing lodash in client bundle
|
||||
// prefer shipping vanilla JS
|
||||
// Prevent importing lodash in client bundle for bundle size
|
||||
'lodash',
|
||||
'lodash.**',
|
||||
'lodash/**',
|
||||
// prevent importing server code in client bundle
|
||||
// Prevent importing server code in client bundle
|
||||
'**/../babel/**',
|
||||
'**/../server/**',
|
||||
'**/../commands/**',
|
||||
|
|
|
@ -41,7 +41,7 @@ function createInlineHtmlBanner(baseUrl: string) {
|
|||
`;
|
||||
}
|
||||
|
||||
// fn needs to work for older browsers!
|
||||
// Needs to work for older browsers!
|
||||
function createInlineScript(baseUrl: string) {
|
||||
return `
|
||||
window['${InsertBannerWindowAttribute}'] = true;
|
||||
|
@ -119,7 +119,6 @@ export default function MaybeBaseUrlIssueBanner(): JSX.Element | null {
|
|||
siteConfig: {baseUrl, baseUrlIssueBanner},
|
||||
} = useDocusaurusContext();
|
||||
const {pathname} = useLocation();
|
||||
// returns true for the homepage during SSR
|
||||
const isHomePage = pathname === baseUrl;
|
||||
const enabled = baseUrlIssueBanner && isHomePage;
|
||||
return enabled ? <BaseUrlIssueBanner /> : null;
|
||||
|
|
|
@ -28,8 +28,7 @@ export const createStatefulLinksCollector = (): StatefulLinksCollector => {
|
|||
|
||||
const Context = React.createContext<LinksCollector>({
|
||||
collectLink: () => {
|
||||
// noop by default for client
|
||||
// we only use the broken links checker server-side
|
||||
// No-op for client. We only use the broken links checker server-side.
|
||||
},
|
||||
});
|
||||
|
||||
|
|
|
@ -174,7 +174,7 @@ function Link(
|
|||
onMouseEnter={onMouseEnter}
|
||||
innerRef={handleRef}
|
||||
to={targetLink}
|
||||
// avoid "React does not recognize the `activeClassName` prop on a DOM
|
||||
// Avoid "React does not recognize the `activeClassName` prop on a DOM
|
||||
// element"
|
||||
{...(isNavLink && {isActive, activeClassName})}
|
||||
/>
|
||||
|
|
|
@ -15,17 +15,9 @@ function addBaseUrl(
|
|||
url: string,
|
||||
{forcePrependBaseUrl = false, absolute = false}: BaseUrlOptions = {},
|
||||
): string {
|
||||
if (!url) {
|
||||
return url;
|
||||
}
|
||||
|
||||
// it never makes sense to add a base url to a local anchor url
|
||||
if (url.startsWith('#')) {
|
||||
return url;
|
||||
}
|
||||
|
||||
// it never makes sense to add a base url to an url with a protocol
|
||||
if (hasProtocol(url)) {
|
||||
// It never makes sense to add base url to a local anchor url, or one with a
|
||||
// protocol
|
||||
if (!url || url.startsWith('#') || hasProtocol(url)) {
|
||||
return url;
|
||||
}
|
||||
|
||||
|
|
|
@ -32,7 +32,7 @@ function mergeContexts({
|
|||
const data = {...parent.data, ...value?.data};
|
||||
|
||||
return {
|
||||
// nested routes are not supposed to override plugin attribute
|
||||
// Nested routes are not supposed to override plugin attribute
|
||||
plugin: parent.plugin,
|
||||
data,
|
||||
};
|
||||
|
|
|
@ -113,8 +113,9 @@ This behavior can have SEO impacts and create relative link issues.
|
|||
shell.exit(0);
|
||||
}
|
||||
|
||||
// github.io indicates organization repos that deploy via default branch.
|
||||
// All others use gh-pages. Organization deploys looks like:
|
||||
// github.io indicates organization repos that deploy via default branch. All
|
||||
// others use gh-pages (either case can be configured actually, but we can
|
||||
// make educated guesses). Organization deploys look like:
|
||||
// - Git repo: https://github.com/<organization>/<organization>.github.io
|
||||
// - Site url: https://<organization>.github.io
|
||||
const isGitHubPagesOrganizationDeploy = projectName.includes('.github.io');
|
||||
|
|
|
@ -41,7 +41,7 @@ export async function start(
|
|||
siteDir,
|
||||
customConfigFilePath: cliOptions.config,
|
||||
locale: cliOptions.locale,
|
||||
localizePath: undefined, // should this be configurable?
|
||||
localizePath: undefined, // Should this be configurable?
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ import tree from 'tree-node-cli';
|
|||
import {eject, wrap} from '../actions';
|
||||
import {posixPath} from '@docusaurus/utils';
|
||||
|
||||
// use relative paths and sort files for tests
|
||||
// Use relative paths and sort files for tests
|
||||
function stableCreatedFiles(
|
||||
siteThemePath: string,
|
||||
createdFiles: string[],
|
||||
|
|
|
@ -53,7 +53,7 @@ export async function eject({
|
|||
const fromPath = path.join(themePath, componentName);
|
||||
const isDirectory = await isDir(fromPath);
|
||||
const globPattern = isDirectory
|
||||
? // do we really want to copy all components?
|
||||
? // Do we really want to copy all components?
|
||||
path.join(fromPath, '*')
|
||||
: `${fromPath}.*`;
|
||||
|
||||
|
|
|
@@ -51,13 +51,13 @@ describe('handleBrokenLinks', () => {
   const linkToEmptyFolder2 = '/emptyFolder/';
   const allCollectedLinks = {
     '/docs/good doc with space': [
-      // good - valid file with spaces in name
+      // Good - valid file with spaces in name
       './another%20good%20doc%20with%20space',
-      // good - valid file with percent-20 in its name
+      // Good - valid file with percent-20 in its name
       './weird%20but%20good',
-      // bad - non-existent file with spaces in name
+      // Bad - non-existent file with spaces in name
       './some%20other%20non-existent%20doc1',
-      // evil - trying to use ../../ but '/' won't get decoded
+      // Evil - trying to use ../../ but '/' won't get decoded
       // cSpell:ignore Fout
       './break%2F..%2F..%2Fout2',
     ],

@@ -91,11 +91,11 @@ describe('handleBrokenLinks', () => {
       linkToHtmlFile2,
       linkToJavadoc3,
       linkToJavadoc4,
-      linkToEmptyFolder1, // not filtered!
+      linkToEmptyFolder1, // Not filtered!
     ],
     '/page2': [
       link2,
-      linkToEmptyFolder2, // not filtered!
+      linkToEmptyFolder2, // Not filtered!
       linkToJavadoc2,
       link3,
       linkToJavadoc3,

@@ -287,15 +287,14 @@ describe('normalizeConfig', () => {
   });

   it('throws error for required fields', () => {
-    expect(
-      () =>
-        validateConfig({
-          invalidField: true,
-          presets: {},
-          stylesheets: {},
-          themes: {},
-          scripts: {},
-        } as unknown as DocusaurusConfig), // to fields not in the type
+    expect(() =>
+      validateConfig({
+        invalidField: true,
+        presets: {},
+        stylesheets: {},
+        themes: {},
+        scripts: {},
+      }),
     ).toThrowErrorMatchingSnapshot();
   });
 });

@@ -84,7 +84,6 @@ function getAllBrokenLinks({
     getPageBrokenLinks({pageLinks, pagePath, routes: filteredRoutes}),
   );

-  // remove pages without any broken link
   return _.pickBy(allBrokenLinks, (brokenLinks) => brokenLinks.length > 0);
 }

@@ -5,58 +5,38 @@
  * LICENSE file in the root directory of this source tree.
  */

-import {execSync} from 'child_process';
+import {execSync, type ExecSyncOptionsWithStringEncoding} from 'child_process';
 import detect from 'detect-port';
 import logger from '@docusaurus/logger';
 import prompts from 'prompts';

-const execOptions = {
-  encoding: 'utf8' as const,
-  stdio: [
-    'pipe' as const, // stdin (default)
-    'pipe' as const, // stdout (default)
-    'ignore' as const, // stderr
-  ],
+const execOptions: ExecSyncOptionsWithStringEncoding = {
+  encoding: 'utf8',
+  stdio: [/* stdin */ 'pipe', /* stdout */ 'pipe', /* stderr */ 'ignore'],
 };

 // Clears console
 function clearConsole(): void {
   process.stdout.write(
     process.platform === 'win32' ? '\x1B[2J\x1B[0f' : '\x1B[2J\x1B[3J\x1B[H',
   );
 }

-// Gets process id of what is on port
-function getProcessIdOnPort(port: number): string {
-  return execSync(`lsof -i:${port} -P -t -sTCP:LISTEN`, execOptions)
-    .split('\n')[0]!
-    .trim();
-}
-
-// Gets process command
-function getProcessCommand(processId: string): string {
-  const command = execSync(
-    `ps -o command -p ${processId} | sed -n 2p`,
-    execOptions,
-  );
-
-  return command.replace(/\n$/, '');
-}
-
-// Gets directory of a process from its process id
-function getDirectoryOfProcessById(processId: string): string {
-  return execSync(
-    `lsof -p ${processId} | awk '$4=="cwd" {for (i=9; i<=NF; i++) printf "%s ", $i}'`,
-    execOptions,
-  ).trim();
-}
-
 // Gets process on port
 function getProcessForPort(port: number): string | null {
   try {
-    const processId = getProcessIdOnPort(port);
-    const directory = getDirectoryOfProcessById(processId);
-    const command = getProcessCommand(processId);
+    const processId = execSync(
+      `lsof -i:${port} -P -t -sTCP:LISTEN`,
+      execOptions,
+    )
+      .split('\n')[0]!
+      .trim();
+    const directory = execSync(
+      `lsof -p ${processId} | awk '$4=="cwd" {for (i=9; i<=NF; i++) printf "%s ", $i}'`,
+      execOptions,
+    ).trim();
+    const command = execSync(
+      `ps -o command -p ${processId} | sed -n 2p`,
+      execOptions,
+    ).replace(/\n$/, '');
     return logger.interpolate`code=${command} subdue=${`(pid ${processId})`} in path=${directory}`;
   } catch {
     return null;

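The hunk above inlines the lsof/ps helpers into getProcessForPort. For context, a hedged sketch of how such a helper is typically wired into a port-choosing prompt — hypothetical code, not the repo's actual choosePort implementation; it only assumes getProcessForPort and execOptions from the hunk are in scope:

import detect from 'detect-port';
import logger from '@docusaurus/logger';
import prompts from 'prompts';

// Hypothetical usage sketch; assumes getProcessForPort (above) is in scope.
async function choosePort(defaultPort: number): Promise<number | null> {
  const freePort = await detect(defaultPort);
  if (freePort === defaultPort) {
    // Nothing is listening on the requested port.
    return defaultPort;
  }
  // Something else holds the port: describe it (best effort) and ask.
  const existingProcess = getProcessForPort(defaultPort);
  logger.warn(
    `Port ${defaultPort} is already in use${
      existingProcess ? ` by ${existingProcess}` : ''
    }.`,
  );
  const {shouldChangePort} = await prompts({
    type: 'confirm',
    name: 'shouldChangePort',
    message: `Would you like to run the app on port ${freePort} instead?`,
    initial: true,
  });
  return shouldChangePort ? freePort : null;
}
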
@@ -166,8 +166,8 @@ export default ${JSON.stringify(siteConfig, null, 2)};
     'client-modules.js',
     `export default [
${clientModules
-  // import() is async so we use require() because client modules can have
-  // CSS and the order matters for loading CSS.
+  // Use `require()` because `import()` is async but client modules can have CSS
+  // and the order matters for loading CSS.
  .map((clientModule) => ` require('${escapePath(clientModule)}'),`)
  .join('\n')}
];

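With the template above, the generated client-modules.js ends up as a flat require() list, which keeps CSS injection in declaration order. Roughly (paths invented for illustration):

// Sketch of a generated client-modules.js file; the actual paths depend on the site.
export default [
  require('/path/to/site/src/css/custom.css'),
  require('/path/to/site/node_modules/some-theme/lib/client/styles.css'),
];
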
@@ -98,7 +98,7 @@ export async function loadPlugins(context: LoadContext): Promise<{
       return;
     }
     const pluginId = plugin.options.id;
-    // plugins data files are namespaced by pluginName/pluginId
+    // Plugins data files are namespaced by pluginName/pluginId
     const dataDir = path.join(
       context.generatedFilesDir,
       plugin.name,

@@ -28,7 +28,7 @@ function getOptionValidationFunction(
   normalizedPluginConfig: NormalizedPluginConfig,
 ): PluginModule['validateOptions'] {
   if (normalizedPluginConfig.pluginModule) {
-    // support both commonjs and ES modules
+    // Support both CommonJS and ES modules
     return (
       normalizedPluginConfig.pluginModule.module?.default?.validateOptions ??
       normalizedPluginConfig.pluginModule.module?.validateOptions

@@ -41,7 +41,7 @@ function getThemeValidationFunction(
   normalizedPluginConfig: NormalizedPluginConfig,
 ): PluginModule['validateThemeConfig'] {
   if (normalizedPluginConfig.pluginModule) {
-    // support both commonjs and ES modules
+    // Support both CommonJS and ES modules
     return (
       normalizedPluginConfig.pluginModule.module.default?.validateThemeConfig ??
       normalizedPluginConfig.pluginModule.module.validateThemeConfig

@@ -65,7 +65,6 @@ export async function initPlugins(
 async function doGetPluginVersion(
   normalizedPluginConfig: NormalizedPluginConfig,
 ): Promise<PluginVersionInformation> {
-  // get plugin version
   if (normalizedPluginConfig.pluginModule?.path) {
     const pluginPath = pluginRequire.resolve(
       normalizedPluginConfig.pluginModule?.path,

@@ -94,7 +94,7 @@ function mergeTranslationFileContent({
         message: options.override
           ? message
           : existingContent[key]?.message ?? message,
-        description, // description
+        description,
       };
     },
   );

@@ -143,7 +143,7 @@ Maybe you should remove them? ${unknownKeys}`;
   }
 }

-// should we make this configurable?
+// Should we make this configurable?
 export function getTranslationsLocaleDirPath(
   context: TranslationContext,
 ): string {

@@ -248,7 +248,7 @@ export async function localizePluginTranslationFile({
   const localizedContent = await readTranslationFileContent(filePath);

   if (localizedContent) {
-    // localized messages "override" default unlocalized messages
+    // Localized messages "override" default unlocalized messages
     return {
       path: translationFile.path,
       content: {

@@ -52,7 +52,7 @@ describe('customize JS loader', () => {

 describe('extending generated webpack config', () => {
   it('direct mutation on generated webpack config object', async () => {
-    // fake generated webpack config
+    // Fake generated webpack config
     let config: Configuration = {
       output: {
         path: __dirname,

@@ -24,7 +24,7 @@ const CSS_MODULE_REGEX = /\.module\.css$/i;
 export const clientDir = path.join(__dirname, '..', 'client');

 const LibrariesToTranspile = [
-  'copy-text-to-clipboard', // contains optional catch binding, incompatible with recent versions of Edge
+  'copy-text-to-clipboard', // Contains optional catch binding, incompatible with recent versions of Edge
 ];

 const LibrariesToTranspileRegex = new RegExp(

@@ -32,7 +32,7 @@ const LibrariesToTranspileRegex = new RegExp(
 );

 export function excludeJS(modulePath: string): boolean {
-  // always transpile client dir
+  // Always transpile client dir
   if (modulePath.startsWith(clientDir)) {
     return false;
   }

@@ -118,9 +118,9 @@ export async function createBaseConfig(
     },
     devtool: isProd ? undefined : 'eval-cheap-module-source-map',
     resolve: {
-      unsafeCache: false, // not enabled, does not seem to improve perf much
+      unsafeCache: false, // Not enabled, does not seem to improve perf much
       extensions: ['.wasm', '.mjs', '.js', '.jsx', '.ts', '.tsx', '.json'],
-      symlinks: true, // see https://github.com/facebook/docusaurus/issues/3272
+      symlinks: true, // See https://github.com/facebook/docusaurus/issues/3272
       roots: [
         // Allow resolution of url("/fonts/xyz.ttf") by webpack
         // See https://webpack.js.org/configuration/resolve/#resolveroots

@@ -167,7 +167,7 @@ export async function createBaseConfig(
         // include [name] in the filenames
         name: false,
         cacheGroups: {
-          // disable the built-in cacheGroups
+          // Disable the built-in cacheGroups
           default: false,
           common: {
             name: 'common',

@@ -238,7 +238,7 @@ export async function createBaseConfig(
       chunkFilename: isProd
         ? 'assets/css/[name].[contenthash:8].css'
         : '[name].css',
-      // remove css order warnings if css imports are not sorted
+      // Remove css order warnings if css imports are not sorted
       // alphabetically. See https://github.com/webpack-contrib/mini-css-extract-plugin/pull/422
       // for more reasoning
       ignoreOrder: true,

@@ -23,7 +23,7 @@ export default async function createClientConfig(
   const config = await createBaseConfig(props, false, minify);

   const clientConfig = merge(config, {
-    // useless, disabled on purpose (errors on existing sites with no
+    // Useless, disabled on purpose (errors on existing sites with no
     // browserslist config)
     // target: 'browserslist',
     entry: path.resolve(__dirname, '../client/clientEntry.js'),

@@ -204,7 +204,7 @@ export function applyConfigurePostCss(
   options: {postcssOptions: PostCssOptions};
 };

-// not ideal heuristic but good enough for our use-case?
+// Not ideal heuristic but good enough for our use-case?
 function isPostCssLoader(loader: unknown): loader is LocalPostCSSLoader {
   return !!(loader as LocalPostCSSLoader)?.options?.postcssOptions;
 }

@@ -249,7 +249,7 @@ export function compile(config: Configuration[]): Promise<void> {
       }
       reject(err);
     }
-    // let plugins consume all the stats
+    // Let plugins consume all the stats
     const errorsWarnings = stats?.toJson('errors-warnings');
     if (stats?.hasErrors()) {
       reject(new Error('Failed to compile with errors.'));

@@ -363,7 +363,7 @@ export function getMinimizer(
       parallel: getTerserParallel(),
       terserOptions: {
         parse: {
-          // we want uglify-js to parse ecma 8 code. However, we don't want it
+          // We want uglify-js to parse ecma 8 code. However, we don't want it
           // to apply any minification steps that turns valid ecma 5 code
           // into invalid ecma 5 code. This is why the 'compress' and 'output'
           // sections only apply transformations that are ecma 5 safe

@@ -21,8 +21,8 @@ const SUPPORTED_MIMES: {[ext: string]: string} = {
 };

 /**
- * it returns a Base64 image string with required formatting
- * to work on the web (<img src=".." /> or in CSS url('..'))
+ * It returns a Base64 image string with required formatting to work on the web
+ * (<img src=".." /> or in CSS url('..'))
  */
 const toBase64 = (extMimeType: string, data: Buffer): string =>
   `data:${extMimeType};base64,${data.toString('base64')}`;

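A quick usage sketch for the helper above (hypothetical file path; assumes toBase64 is in scope):

import fs from 'fs';

// Read a small image and turn it into an inlineable data URI.
const data = fs.readFileSync('static/img/example.png');
const src = toBase64('image/png', data);
// src === 'data:image/png;base64,...' and can be used directly as an
// <img src> value or inside CSS url(...).
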
@@ -21,6 +21,7 @@ backticks
 bartosz
 beforeinstallprompt
 bhatt
+blocklist
 blockquotes
 browserslist
 browserstack

@@ -87,7 +87,7 @@ const sidebars = {
       collapsed: false,
       collapsible: false,
       items: [
-        // title
+        // Title
         {
           type: 'html',
           value: 'Some Text',

@@ -22,7 +22,7 @@ exports.dogfoodingThemeInstances = dogfoodingThemeInstances;
 /** @type {import('@docusaurus/types').PluginConfig[]} */
 const dogfoodingPluginInstances = [
   [
-    'content-docs', // dogfood shorthand
+    'content-docs', // Shorthand
     /** @type {import('@docusaurus/plugin-content-docs').Options} */
     ({
       id: 'docs-tests',

@@ -52,7 +52,7 @@ const dogfoodingPluginInstances = [
   ],

   [
-    '@docusaurus/plugin-content-blog', // dogfood longhand
+    '@docusaurus/plugin-content-blog', // Longhand
     /** @type {import('@docusaurus/plugin-content-blog').Options} */
     ({
       id: 'blog-tests',

@@ -75,7 +75,7 @@ const dogfoodingPluginInstances = [
   ],

   [
-    require.resolve('@docusaurus/plugin-content-pages'), // dogfood longhand resolve
+    require.resolve('@docusaurus/plugin-content-pages'), // Full path
     /** @type {import('@docusaurus/plugin-content-pages').Options} */
     ({
       id: 'pages-tests',

@@ -68,16 +68,16 @@ Please double-check or clean up these components from the config:

 // TODO temp workaround: non-comps should be forbidden to wrap
 if (action === 'wrap') {
-  const WrapBlacklist = [
-    'Layout', // due to theme-fallback?
+  const WrapBlocklist = [
+    'Layout', // Due to theme-fallback?
   ];

   componentNames = componentNames.filter((componentName) => {
-    const blacklisted = WrapBlacklist.includes(componentName);
-    if (!WrapBlacklist) {
-      logger.warn(`${componentName} is blacklisted and will not be wrapped`);
+    const blocked = WrapBlocklist.includes(componentName);
+    if (blocked) {
+      logger.warn(`${componentName} is blocked and will not be wrapped`);
     }
-    return !blacklisted;
+    return !blocked;
   });
 }

@@ -170,8 +170,8 @@ const config = {
       ({
         fromExtensions: ['html'],
         createRedirects(routePath) {
-          // redirect to /docs from /docs/introduction,
-          // as introduction has been made the home doc
+          // Redirect to /docs from /docs/introduction, as introduction has been
+          // made the home doc
           if (allDocHomesPaths.includes(routePath)) {
             return [`${routePath}/introduction`];
           }

@@ -195,13 +195,15 @@ const config = {
     ],
     [
       'ideal-image',
-      {
+      /** @type {import('@docusaurus/plugin-ideal-image').PluginOptions} */
+      ({
         quality: 70,
-        max: 1030, // max resized image's size.
-        min: 640, // min resized image's size. if original is lower, use that size.
-        steps: 2, // the max number of images generated between min and max (inclusive)
-        // disableInDev: false,
-      },
+        max: 1030,
+        min: 640,
+        steps: 2,
+        // Use false to debug, but it incurs huge perf costs
+        disableInDev: true,
+      }),
     ],
     [
       'pwa',

@@ -413,7 +415,7 @@ const config = {
         label: 'Tests',
         docsPluginId: 'docs-tests',
       },
-      // right
+      // Right
       {
         type: 'docsVersionDropdown',
         position: 'right',

@@ -47,7 +47,7 @@ const EXPECTED_CSS_MARKERS = [
   '.test-marker-theme-layout',
   '.test-marker-site-index-page',

-  // lazy loaded lib
+  // Lazy-loaded lib
   '.DocSearch-Modal',
 ];

@@ -24,7 +24,7 @@ const pollInterval = 5000;
 const timeout = 5 * 60 * 1000;

 const projectId = 428890;
-const token = process.env.CROWDIN_PERSONAL_TOKEN; // set on Netlify
+const token = process.env.CROWDIN_PERSONAL_TOKEN; // Set on Netlify

 const translations = new Translations({token});