misc(v2): clean up comments in code (#2294)

Repository: https://github.com/facebook/docusaurus
Parent: d7f3dff6e6
Commit: 996b115199

45 changed files with 234 additions and 155 deletions
@@ -50,7 +50,7 @@ export async function init(

let name = siteName;

-// Prompt if siteName is not passed from CLI
+// Prompt if siteName is not passed from CLI.
if (!name) {
const {name: promptedName} = await inquirer.prompt({
type: 'input',

@@ -71,7 +71,7 @@ export async function init(
}

let template = reqTemplate;
-// Prompt if template is not provided from CLI
+// Prompt if template is not provided from CLI.
if (!template) {
const {template: promptedTemplate} = await inquirer.prompt({
type: 'list',

@@ -82,7 +82,7 @@ export async function init(
template = promptedTemplate;
}

-// If user choose Git repository, we'll prompt for the url
+// If user choose Git repository, we'll prompt for the url.
if (template === gitChoice) {
const {gitRepoUrl} = await inquirer.prompt({
type: 'input',

@@ -112,7 +112,7 @@ export async function init(
throw new Error(chalk.red(`Cloning Git template: ${template} failed!`));
}
} else if (template && templates.includes(template)) {
-// Docusaurus templates
+// Docusaurus templates.
try {
await fs.copy(path.resolve(templatesDir, template), dest);
} catch (err) {

@@ -125,7 +125,7 @@ export async function init(
throw new Error('Invalid template');
}

-// Update package.json info
+// Update package.json info.
try {
await updatePkg(path.join(dest, 'package.json'), {
name: kebabCase(name),

@@ -137,7 +137,7 @@ export async function init(
throw err;
}

-// We need to Rename the gitignore file to .gitignore
+// We need to rename the gitignore file to .gitignore
if (
!fs.pathExistsSync(path.join(dest, '.gitignore')) &&
fs.pathExistsSync(path.join(dest, 'gitignore'))
@@ -46,12 +46,13 @@ module.exports = async function(fileString) {

let exportStr = `export const frontMatter = ${stringifyObject(data)};`;

-// Read metadata for this MDX and export it
+// Read metadata for this MDX and export it.
if (options.metadataPath && typeof options.metadataPath === 'function') {
const metadataPath = options.metadataPath(this.resourcePath);

if (metadataPath) {
-// Add as dependency of this loader result so that we can recompile if metadata is changed
+// Add as dependency of this loader result so that we can
+// recompile if metadata is changed.
this.addDependency(metadataPath);
const metadata = await readFile(metadataPath, 'utf8');
exportStr += `\nexport const metadata = ${metadata};`;
@@ -29,12 +29,13 @@ function toValue(node) {
default:
}
}

return toString(node);
}

// Visit all headings. We `slug` all headings (to account for
// duplicates), but only take h2 and h3 headings.
-const search = node => {
+function search(node) {
const headings = [];
let current = -1;
let currentDepth = 0;

@@ -65,6 +66,6 @@ const search = node => {
visit(node, 'heading', onHeading);

return headings;
-};
+}

module.exports = search;
@@ -18,7 +18,7 @@ export function truncate(fileString: string, truncateMarker: RegExp) {
}

// YYYY-MM-DD-{name}.mdx?
-// prefer named capture, but old Node version does not support.
+// Prefer named capture, but older Node versions do not support it.
const FILENAME_PATTERN = /^(\d{4}-\d{1,2}-\d{1,2})-?(.*?).mdx?$/;

function toUrl({date, link}: DateLink) {

@@ -111,15 +111,18 @@ export async function generateBlogPosts(
// Extract date and title from filename.
const match = blogFileName.match(FILENAME_PATTERN);
let linkName = blogFileName.replace(/\.mdx?$/, '');

if (match) {
const [, dateString, name] = match;
date = new Date(dateString);
linkName = name;
}

// Prefer user-defined date.
if (frontMatter.date) {
date = new Date(frontMatter.date);
}

// Use file create time for blog.
date = date || (await fs.stat(source)).birthtime;
frontMatter.title = frontMatter.title || linkName;
@@ -41,7 +41,7 @@ const DEFAULT_OPTIONS: PluginOptions = {
blogTagsPostsComponent: '@theme/BlogTagsPostsPage',
remarkPlugins: [],
rehypePlugins: [],
-truncateMarker: /<!--\s*(truncate)\s*-->/, // Regex
+truncateMarker: /<!--\s*(truncate)\s*-->/, // Regex.
};

function assertFeedTypes(val: any): asserts val is FeedType {

@@ -94,7 +94,7 @@ export default function pluginContentBlog(
return null;
}

-// Colocate next and prev metadata
+// Colocate next and prev metadata.
blogPosts.forEach((blogPost, index) => {
const prevItem = index > 0 ? blogPosts[index - 1] : null;
if (prevItem) {

@@ -103,6 +103,7 @@ export default function pluginContentBlog(
permalink: prevItem.metadata.permalink,
};
}

const nextItem =
index < blogPosts.length - 1 ? blogPosts[index + 1] : null;
if (nextItem) {

@@ -168,7 +169,8 @@ export default function pluginContentBlog(
const permalink = normalizeUrl([tagsPath, normalizedTag]);
if (!blogTags[normalizedTag]) {
blogTags[normalizedTag] = {
-name: tag.toLowerCase(), // Will only use the name of the first occurrence of the tag.
+// Will only use the name of the first occurrence of the tag.
+name: tag.toLowerCase(),
items: [],
permalink,
};

@@ -232,7 +234,8 @@ export default function pluginContentBlog(
blogPosts.map(async blogPost => {
const {id, metadata} = blogPost;
await createData(
-// Note that this created data path must be in sync with metadataPath provided to mdx-loader
+// Note that this created data path must be in sync with
+// metadataPath provided to mdx-loader.
`${docuHash(metadata.source)}.json`,
JSON.stringify(metadata, null, 2),
);

@@ -374,7 +377,8 @@ export default function pluginContentBlog(
options: {
remarkPlugins,
rehypePlugins,
-// Note that metadataPath must be the same/ in-sync as the path from createData for each MDX
+// Note that metadataPath must be the same/in-sync as
+// the path from createData for each MDX.
metadataPath: (mdxPath: string) => {
const aliasedSource = aliasedSitePath(mdxPath, siteDir);
return path.join(
@@ -16,10 +16,11 @@ export = function(fileString: string) {

let finalContent = fileString;

-// Truncate content if requested (e.g: file.md?truncated=true)
+// Truncate content if requested (e.g: file.md?truncated=true).
const {truncated} = this.resourceQuery && parseQuery(this.resourceQuery);
if (truncated) {
finalContent = truncate(fileString, truncateMarker);
}

return callback && callback(null, finalContent);
} as loader.Loader;
@@ -71,7 +71,7 @@ export default function pluginContentDocs(
'docusaurus-plugin-content-docs',
);

-// Versioning
+// Versioning.
const env = loadEnv(siteDir);
const {versioning} = env;
const {

@@ -147,7 +147,7 @@ export default function pluginContentDocs(
),
);

-// Metadata for versioned docs
+// Metadata for versioned docs.
if (versioning.enabled) {
const versionedGlob = _.flatten(
include.map(pattern =>

@@ -173,7 +173,7 @@ export default function pluginContentDocs(
);
}

-// Load the sidebars & create docs ordering
+// Load the sidebars and create docs ordering.
const sidebarPaths = [
sidebarPath,
...versionsNames.map(

@@ -185,7 +185,7 @@ export default function pluginContentDocs(

await Promise.all(docsPromises);

-// Construct inter-metadata relationship in docsMetadata
+// Construct inter-metadata relationship in docsMetadata.
const docsMetadata: DocsMetadata = {};
const permalinkToSidebar: PermalinkToSidebar = {};
const versionToSidebars: VersionToSidebars = {};

@@ -211,7 +211,7 @@ export default function pluginContentDocs(
next,
};

-// sourceToPermalink and permalinkToSidebar mapping
+// sourceToPermalink and permalinkToSidebar mapping.
const {source, permalink, version} = docsMetadataRaw[currentID];
sourceToPermalink[source] = permalink;
if (sidebar) {

@@ -255,8 +255,9 @@ export default function pluginContentDocs(
}
};

-// Transform the sidebar so that all sidebar item will be in the form of 'link' or 'category' only
-// This is what will be passed as props to the UI component
+// Transform the sidebar so that all sidebar item will be in the
+// form of 'link' or 'category' only.
+// This is what will be passed as props to the UI component.
const docsSidebars: DocsSidebar = Object.entries(loadedSidebars).reduce(
(acc: DocsSidebar, [sidebarId, sidebarItems]) => {
acc[sidebarId] = sidebarItems.map(normalizeItem);

@@ -290,10 +291,12 @@ export default function pluginContentDocs(
const routes = await Promise.all(
metadataItems.map(async metadataItem => {
await createData(
-// Note that this created data path must be in sync with metadataPath provided to mdx-loader
+// Note that this created data path must be in sync with
+// metadataPath provided to mdx-loader.
`${docuHash(metadataItem.source)}.json`,
JSON.stringify(metadataItem, null, 2),
);

return {
path: metadataItem.permalink,
component: docItemComponent,

@@ -304,6 +307,7 @@ export default function pluginContentDocs(
};
}),
);

return routes.sort((a, b) =>
a.path > b.path ? 1 : b.path > a.path ? -1 : 0,
);

@@ -331,8 +335,8 @@ export default function pluginContentDocs(
});
};

-// If versioning is enabled, we cleverly chunk the generated routes to be by version
-// and pick only needed base metadata
+// If versioning is enabled, we cleverly chunk the generated routes
+// to be by version and pick only needed base metadata.
if (versioning.enabled) {
const docsMetadataByVersion = _.groupBy(
Object.values(content.docsMetadata),

@@ -365,8 +369,9 @@ export default function pluginContentDocs(
version,
};

-// We want latest version route config to be placed last in the generated routeconfig.
-// Otherwise, `/docs/next/foo` will match `/docs/:route` instead of `/docs/next/:route`
+// We want latest version route config to be placed last in the
+// generated routeconfig. Otherwise, `/docs/next/foo` will match
+// `/docs/:route` instead of `/docs/next/:route`.
return addBaseRoute(
docsBaseRoute,
docsBaseMetadata,

@@ -410,7 +415,8 @@ export default function pluginContentDocs(
remarkPlugins,
rehypePlugins,
metadataPath: (mdxPath: string) => {
-// Note that metadataPath must be the same/ in-sync as the path from createData for each MDX
+// Note that metadataPath must be the same/in-sync as
+// the path from createData for each MDX.
const aliasedSource = aliasedSitePath(mdxPath, siteDir);
return path.join(
dataDir,
@@ -31,7 +31,8 @@ export default async function getFileLastUpdate(
: {timestamp: +temp[1], author: temp[2]};
}

-// Wrap in try/catch in case the shell commands fail (e.g. project doesn't use Git, etc).
+// Wrap in try/catch in case the shell commands fail
+// (e.g. project doesn't use Git, etc).
try {
if (!shell.which('git')) {
if (!showedGitRequirementError) {
@@ -64,6 +64,7 @@ export default function(
}
return modifiedLine;
});

content = lines.join('\n');
}
@@ -32,7 +32,7 @@ async function lastUpdated(
): Promise<LastUpdateData> {
const {showLastUpdateAuthor, showLastUpdateTime} = options;
if (showLastUpdateAuthor || showLastUpdateTime) {
-// Use fake data in dev for faster development
+// Use fake data in dev for faster development.
const fileLastUpdateData =
process.env.NODE_ENV === 'production'
? await lastUpdate(filePath)

@@ -49,6 +49,7 @@ async function lastUpdated(
};
}
}

return {};
}

@@ -83,7 +84,7 @@ export default async function processMetadata({
}
}

-// The version portion of the url path. Eg: 'next', '1.0.0', and ''
+// The version portion of the url path. Eg: 'next', '1.0.0', and ''.
const versionPath =
version && version !== versioning.latestVersion ? version : '';

@@ -112,7 +113,7 @@ export default async function processMetadata({

const description: string = frontMatter.description || excerpt;

-// The last portion of the url path. Eg: 'foo/bar', 'bar'
+// The last portion of the url path. Eg: 'foo/bar', 'bar'.
const routePath =
version && version !== 'next'
? id.replace(new RegExp(`^version-${version}/`), '')

@@ -126,8 +127,10 @@ export default async function processMetadata({

const {lastUpdatedAt, lastUpdatedBy} = await lastUpdatedPromise;

-// Assign all of object properties during instantiation (if possible) for NodeJS optimization
-// Adding properties to object after instantiation will cause hidden class transitions.
+// Assign all of object properties during instantiation (if possible) for
+// NodeJS optimization.
+// Adding properties to object after instantiation will cause hidden
+// class transitions.
const metadata: MetadataRaw = {
id,
title,
@@ -18,7 +18,7 @@ import {
} from './types';

/**
-* Check that item contains only allowed keys
+* Check that item contains only allowed keys.
*/
function assertItem(item: Object, keys: string[]): void {
const unknownKeys = Object.keys(item).filter(

@@ -72,8 +72,8 @@ function assertIsLink(item: any): asserts item is SidebarItemLink {
}

/**
-* Normalizes recursively item and all its children. Ensures, that at the end
-* each item will be an object with the corresponding type
+* Normalizes recursively item and all its children. Ensures that at the end
+* each item will be an object with the corresponding type.
*/
function normalizeItem(item: SidebarItemRaw): SidebarItem {
if (typeof item === 'string') {

@@ -102,7 +102,7 @@ function normalizeItem(item: SidebarItemRaw): SidebarItem {
}

/**
-* Converts sidebars object to mapping to arrays of sidebar item objects
+* Converts sidebars object to mapping to arrays of sidebar item objects.
*/
function normalizeSidebar(sidebars: SidebarRaw): Sidebar {
return Object.entries(sidebars).reduce(

@@ -110,7 +110,7 @@ function normalizeSidebar(sidebars: SidebarRaw): Sidebar {
let normalizedSidebar: SidebarItemRaw[];

if (!Array.isArray(sidebar)) {
-// convert sidebar to a more generic structure
+// Convert sidebar to a more generic structure.
normalizedSidebar = Object.entries(sidebar).map(([label, items]) => ({
type: 'category',
label,

@@ -131,9 +131,11 @@ function normalizeSidebar(sidebars: SidebarRaw): Sidebar {
export default function loadSidebars(sidebarPaths?: string[]): Sidebar {
// We don't want sidebars to be cached because of hot reloading.
let allSidebars: SidebarRaw = {};

if (!sidebarPaths || !sidebarPaths.length) {
return {} as Sidebar;
}

sidebarPaths.map(sidebarPath => {
if (sidebarPath && fs.existsSync(sidebarPath)) {
const sidebar = importFresh(sidebarPath) as SidebarRaw;
@@ -25,18 +25,21 @@ export function docsVersion(
'No version tag specified!. Pass the version you wish to create as an argument. Ex: 1.0.0',
);
}

if (version.includes('/') || version.includes('\\')) {
throw new Error(
`Invalid version tag specified! Do not include slash (/) or (\\). Try something like: 1.0.0`,
);
}

if (version.length > 32) {
throw new Error(
'Invalid version tag specified! Length must <= 32 characters. Try something like: 1.0.0',
);
}

-// Since we are going to create `version-${version}` folder, we need to make sure its a valid path name
+// Since we are going to create `version-${version}` folder, we need to make
+// sure it's a valid pathname.
if (/[<>:"\/\\|?*\x00-\x1F]/g.test(version)) {
throw new Error(
'Invalid version tag specified! Please ensure its a valid pathname too. Try something like: 1.0.0',

@@ -49,14 +52,14 @@ export function docsVersion(
);
}

-// Load existing versions
+// Load existing versions.
let versions = [];
const versionsJSONFile = getVersionsJSONFile(siteDir);
if (fs.existsSync(versionsJSONFile)) {
versions = JSON.parse(fs.readFileSync(versionsJSONFile, 'utf8'));
}

-// Check if version already exist
+// Check if version already exists.
if (versions.includes(version)) {
throw new Error(
'This version already exists!. Use a version tag that does not already exist.',

@@ -65,7 +68,7 @@ export function docsVersion(

const {path: docsPath, sidebarPath} = options;

-// Copy docs files
+// Copy docs files.
const docsDir = path.join(siteDir, docsPath);
if (fs.existsSync(docsDir) && fs.readdirSync(docsDir).length > 0) {
const versionedDir = getVersionedDocsDir(siteDir);

@@ -75,11 +78,11 @@ export function docsVersion(
throw new Error('There is no docs to version !');
}

-// Load current sidebar and create a new versioned sidebars file
+// Load current sidebar and create a new versioned sidebars file.
if (fs.existsSync(sidebarPath)) {
const loadedSidebars: Sidebar = loadSidebars([sidebarPath]);

-// Transform id in original sidebar to versioned id
+// Transform id in original sidebar to versioned id.
const normalizeItem = (item: SidebarItem): SidebarItem => {
switch (item.type) {
case 'category':

@@ -117,7 +120,7 @@ export function docsVersion(
);
}

-// update versions.json file
+// Update versions.json file.
versions.unshift(version);
fs.ensureDirSync(path.dirname(versionsJSONFile));
fs.writeFileSync(versionsJSONFile, `${JSON.stringify(versions, null, 2)}\n`);
@@ -15,7 +15,8 @@ export default (function() {
// Set page so that subsequent hits on this page are attributed
// to this page. This is recommended for Single-page Applications.
window.ga('set', 'page', location.pathname);
-// Always refer to the variable on window in-case it gets overridden elsewhere.
+// Always refer to the variable on window in-case it gets
+// overridden elsewhere.
window.ga('send', 'pageview');
},
};
@@ -26,7 +26,9 @@ module.exports = function(context) {
'Please ensure this is not a mistake.',
);
}

const isProd = process.env.NODE_ENV === 'production';

return {
name: 'docusaurus-plugin-google-analytics',
@@ -28,6 +28,7 @@ module.exports = function(context) {
}

const isProd = process.env.NODE_ENV === 'production';

return {
name: 'docusaurus-plugin-google-gtag',
@@ -12,6 +12,7 @@ import path from 'path';

export default function(_context: LoadContext, options: PluginOptions) {
const isProd = process.env.NODE_ENV === 'production';

return {
name: 'docusaurus-plugin-ideal-image',
@@ -12,7 +12,7 @@ import createSitemap from './createSitemap';
import {LoadContext, Props} from '@docusaurus/types';

const DEFAULT_OPTIONS: PluginOptions = {
-cacheTime: 600 * 1000, // 600 sec - cache purge period
+cacheTime: 600 * 1000, // 600 sec - cache purge period.
changefreq: 'weekly',
priority: 0.5,
};

@@ -27,14 +27,14 @@ export default function pluginSitemap(
name: 'docusaurus-plugin-sitemap',

async postBuild({siteConfig, routesPaths, outDir}: Props) {
-// Generate sitemap
+// Generate sitemap.
const generatedSitemap = createSitemap(
siteConfig,
routesPaths,
options,
).toString();

-// Write sitemap file
+// Write sitemap file.
const sitemapPath = path.join(outDir, 'sitemap.xml');
fs.writeFile(sitemapPath, generatedSitemap, err => {
if (err) {
@@ -12,15 +12,18 @@ const addAdmonitions = pluginOptions => {
remarkPlugins: [admonitions],
};
}

if (pluginOptions.admonitions === false) {
return pluginOptions;
}

const admonitionsOptions = {
remarkPlugins: (pluginOptions.remarkPlugins || []).concat([
admonitions,
pluginOptions.admonitions || {},
]),
};

return {
...pluginOptions,
...admonitionsOptions,

@@ -39,7 +42,7 @@ module.exports = function preset(context, opts = {}) {
return {
themes: [
['@docusaurus/theme-classic', opts.theme],
-// Don't add this if algolia config is not defined
+// Don't add this if algolia config is not defined.
algolia && '@docusaurus/theme-search-algolia',
],
plugins: [
@@ -11,7 +11,7 @@ import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
import useBaseUrl from '@docusaurus/useBaseUrl';
import useLockBodyScroll from '@theme/hooks/useLockBodyScroll';
import Link from '@docusaurus/Link';
-import isInternalUrl from '@docusaurus/utils'; // eslint-disable-line import/no-extraneous-dependencies
+import isInternalUrl from '@docusaurus/isInternalUrl';

import styles from './styles.module.css';
@@ -5,4 +5,4 @@
* LICENSE file in the root directory of this source tree.
*/

-export {default} from '@docusaurus/Noop';
+export {default} from '@docusaurus/noop';
@@ -19,7 +19,8 @@ module.exports = function() {
return {
resolve: {
alias: {
-// fork of Buble which removes Buble's large dependency and weighs in at a smaller size of ~51kB
+// fork of Buble which removes Buble's large dependency and weighs in
+// at a smaller size of ~51kB
// https://github.com/FormidableLabs/react-live#what-bundle-size-can-i-expect
buble: '@philpl/buble',
},
@@ -16,7 +16,7 @@ module.exports = function() {
},

configureWebpack() {
-// Ensure that algolia docsearch css is its own chunk
+// Ensure that algolia docsearch styles is its own chunk.
return {
optimization: {
splitChunks: {

@@ -26,7 +26,7 @@ module.exports = function() {
test: /algolia\.css$/,
chunks: `all`,
enforce: true,
-// Set priority higher than docusaurus single-css extraction
+// Set priority higher than docusaurus single-css extraction.
priority: 60,
},
},
@@ -33,12 +33,13 @@ const Search = props => {
// navigation and avoiding a full page refresh.
handleSelected: (_input, _event, suggestion) => {
// Use an anchor tag to parse the absolute url into a relative url
-// Alternatively, we can use new URL(suggestion.url) but its not supported in IE
+// Alternatively, we can use new URL(suggestion.url) but it's not supported in IE.
const a = document.createElement('a');
a.href = suggestion.url;

-// Algolia use closest parent element id #__docusaurus when a h1 page title does not have an id
-// So, we can safely remove it. See https://github.com/facebook/docusaurus/issues/1828 for more details.
+// Algolia use closest parent element id #__docusaurus when a h1 page title does
+// not have an id, so we can safely remove it.
+// See https://github.com/facebook/docusaurus/issues/1828 for more details.
const routePath =
`#__docusaurus` === a.hash
? `${a.pathname}`
@@ -29,9 +29,9 @@ export async function generate(

let lastHash = fileHash.get(filepath);

-// If file already exist but its not in runtime cache hash yet,
+// If file already exists but its not in runtime cache yet,
// we try to calculate the content hash and then compare
-// This is to avoid unnecessary overwrite and we can reuse old file
+// This is to avoid unnecessary overwriting and we can reuse old file.
if (!lastHash && fs.existsSync(filepath)) {
const lastContent = await fs.readFile(filepath, 'utf8');
lastHash = createHash('md5')

@@ -64,7 +64,8 @@ const indexRE = /(^|.*\/)index\.(md|js|jsx|ts|tsx)$/i;
const extRE = /\.(md|js)$/;

/**
-* Convert filepath to url path. Example: 'index.md' -> '/', 'foo/bar.js' -> '/foo/bar',
+* Convert filepath to url path.
+* Example: 'index.md' -> '/', 'foo/bar.js' -> '/foo/bar',
*/
export function fileToPath(file: string): string {
if (indexRE.test(file)) {

@@ -81,7 +82,8 @@ export function encodePath(userpath: string): string {
}

/**
-* Given an input string, convert to kebab-case and append a hash. Avoid str collision
+* Given an input string, convert to kebab-case and append a hash.
+* Avoid str collision.
*/
export function docuHash(str: string): string {
if (str === '/') {

@@ -95,7 +97,8 @@ export function docuHash(str: string): string {
}

/**
-* Generate unique React Component Name. E.g: /foo-bar -> FooBar096
+* Generate unique React Component Name.
+* E.g: /foo-bar -> FooBar096
*/
export function genComponentName(pagePath: string): string {
if (pagePath === '/') {

@@ -107,7 +110,8 @@ export function genComponentName(pagePath: string): string {
}

/**
-* Convert Windows backslash paths to posix style paths. E.g: endi\\lie -> endi/lie
+* Convert Windows backslash paths to posix style paths.
+* E.g: endi\\lie -> endi/lie
*/
export function posixPath(str: string): string {
const isExtendedLengthPath = /^\\\\\?\\/.test(str);

@@ -121,7 +125,7 @@ export function posixPath(str: string): string {

const chunkNameCache = new Map();
/**
-* Generate unique chunk name given a module path
+* Generate unique chunk name given a module path.
*/
export function genChunkName(
modulePath: string,

@@ -164,7 +168,7 @@ export function idx(target: any, keyPaths?: string | (string | number)[]): any {
}

/**
-* Given a filepath and dirpath, get the first directory
+* Given a filepath and dirpath, get the first directory.
*/
export function getSubFolder(file: string, refDir: string): string | null {
const separator = escapeStringRegexp(path.sep);

@@ -193,6 +197,7 @@ export function parse(
.shift();
},
};

const {data: frontMatter, content, excerpt} = matter(fileString, options);
return {frontMatter, content, excerpt};
}

@@ -238,27 +243,30 @@ export function normalizeUrl(rawUrls: string[]): string {
}

let str = resultArray.join('/');
-// Each input component is now separated by a single slash except the possible first plain protocol part.
+// Each input component is now separated by a single slash
+// except the possible first plain protocol part.

-// remove trailing slash before parameters or hash
+// Remove trailing slash before parameters or hash.
str = str.replace(/\/(\?|&|#[^!])/g, '$1');

-// replace ? in parameters with &
+// Replace ? in parameters with &.
const parts = str.split('?');
str = parts.shift() + (parts.length > 0 ? '?' : '') + parts.join('&');

-// dedupe forward slashes
+// Dedupe forward slashes.
str = str.replace(/^\/+/, '/');

return str;
}

/**
-* Alias filepath relative to site directory, very useful so that we don't expose user's site structure.
+* Alias filepath relative to site directory, very useful so that we
+* don't expose user's site structure.
* Example: some/path/to/website/docs/foo.md -> @site/docs/foo.md
*/
export function aliasedSitePath(filePath: string, siteDir: string) {
const relativePath = path.relative(siteDir, filePath);
-// Cannot use path.join() as it resolves '../' and removes the '@site'. Let webpack loader resolve it.
+// Cannot use path.join() as it resolves '../' and removes
+// the '@site'. Let webpack loader resolve it.
return `@site/${relativePath}`;
}
@@ -15,7 +15,7 @@ const fetched = {};
const loaded = {};

const isSlowConnection = () => {
-// if user is on slow or constrained connection
+// If user is on slow or constrained connection.
if (`connection` in navigator) {
if (
(navigator.connection.effectiveType || ``).indexOf(`2g`) !== -1 &&

@@ -37,10 +37,10 @@ const docusaurus = {
if (!canPrefetch(routePath)) {
return false;
}
-// prevent future duplicate prefetch of routePath
+// Prevent future duplicate prefetch of routePath.
fetched[routePath] = true;

-// Find all webpack chunk names needed
+// Find all webpack chunk names needed.
const matches = matchRoutes(routes, routePath);
const chunkNamesNeeded = matches.reduce((arr, match) => {
const chunk = routesChunkNames[match.route.path];

@@ -52,25 +52,28 @@ const docusaurus = {
return arr.concat(chunkNames);
}, []);

-// Prefetch all webpack chunk assets file needed
+// Prefetch all webpack chunk assets file needed.
chunkNamesNeeded.forEach(chunkName => {
-// "__webpack_require__.gca" is a custom function provided by ChunkAssetPlugin
-// Pass it the chunkName or chunkId you want to load and it will return the URL for that chunk
+// "__webpack_require__.gca" is a custom function provided by ChunkAssetPlugin.
+// Pass it the chunkName or chunkId you want to load and it will return the URL for that chunk.
// eslint-disable-next-line no-undef
const chunkAsset = __webpack_require__.gca(chunkName);

-// In some cases, webpack might decide to optimize further & hence the chunk asssets are merged to another chunk/previous chunk
-// Hence, we can safely filter it out/ dont need to load it
+// In some cases, webpack might decide to optimize further & hence the chunk assets are merged to another chunk/previous chunk.
+// Hence, we can safely filter it out/don't need to load it.
if (chunkAsset && !/undefined/.test(chunkAsset)) {
prefetchHelper(chunkAsset);
}
});

return true;
},

preload: routePath => {
if (!canPreload(routePath)) {
return false;
}

loaded[routePath] = true;
preloadHelper(routes, routePath);
return true;
@@ -55,7 +55,7 @@ function ComponentCreator(path) {
modules: optsModules,
webpack: () => optsWebpack,
render: (loaded, props) => {
-// clone the original object since we don't want to alter the original.
+// Clone the original object since we don't want to alter the original.
const loadedModules = JSON.parse(JSON.stringify(chunkNames));
Object.keys(loaded).forEach(key => {
let val = loadedModules;
@@ -7,7 +7,7 @@

import React, {useEffect, useRef} from 'react';
import {NavLink} from 'react-router-dom';
-import isInternalUrl from '@docusaurus/utils';
+import isInternalUrl from '@docusaurus/isInternalUrl';

function Link(props) {
const {to, href} = props;

@@ -23,7 +23,7 @@ function Link(props) {
io = new window.IntersectionObserver(entries => {
entries.forEach(entry => {
if (el === entry.target) {
-// If element is in viewport, stop listening/observing & run callback.
+// If element is in viewport, stop listening/observing and run callback.
// https://developer.mozilla.org/en-US/docs/Web/API/Intersection_Observer_API
if (entry.isIntersecting || entry.intersectionRatio > 0) {
io.unobserve(el);

@@ -33,13 +33,14 @@ function Link(props) {
}
});
});
-// Add element to the observer
+
+// Add element to the observer.
io.observe(el);
};

const handleRef = ref => {
if (IOSupported && ref && isInternal) {
-// If IO supported and element reference found, setup Observer functionality
+// If IO supported and element reference found, setup Observer functionality.
handleIntersection(ref, () => {
window.docusaurus.prefetch(targetLink);
});

@@ -54,11 +55,12 @@ function Link(props) {
};

useEffect(() => {
-// If IO is not supported. We prefetch by default (only once)
+// If IO is not supported. We prefetch by default (only once).
if (!IOSupported && isInternal) {
window.docusaurus.prefetch(targetLink);
}
-// when unmount, stops intersection observer from watching
+
+// When unmounting, stop intersection observer from watching.
return () => {
if (IOSupported && io) {
io.disconnect();
@@ -19,8 +19,10 @@ export default function useBaseUrl(url) {
if (externalRegex.test(url)) {
return url;
}

if (url.startsWith('/')) {
return baseUrl + url.slice(1);
}

return baseUrl + url;
}
@@ -23,6 +23,7 @@ function flat(target) {
output[newKey] = value;
});
}

step(target);
return output;
}
@@ -5,10 +5,11 @@
* LICENSE file in the root directory of this source tree.
*/

-const support = function(feature) {
+function support(feature) {
if (typeof document === 'undefined') {
return false;
}

const fakeLink = document.createElement('link');
try {
if (fakeLink.relList && typeof fakeLink.relList.supports === 'function') {

@@ -17,10 +18,11 @@ const support = function(feature) {
} catch (err) {
return false;
}
+
return false;
-};
+}

-const linkPrefetchStrategy = function(url) {
+function linkPrefetchStrategy(url) {
return new Promise((resolve, reject) => {
if (typeof document === 'undefined') {
reject();

@@ -39,9 +41,9 @@ const linkPrefetchStrategy = function(url) {
document.getElementsByName('script')[0].parentNode;
parentElement.appendChild(link);
});
-};
+}

-const xhrPrefetchStrategy = function(url) {
+function xhrPrefetchStrategy(url) {
return new Promise((resolve, reject) => {
const req = new XMLHttpRequest();
req.open('GET', url, true);

@@ -57,7 +59,7 @@ const xhrPrefetchStrategy = function(url) {

req.send(null);
});
-};
+}

const supportedPrefetchStrategy = support('prefetch')
? linkPrefetchStrategy

@@ -65,7 +67,7 @@ const supportedPrefetchStrategy = support('prefetch')

const preFetched = {};

-const prefetch = function(url) {
+function prefetch(url) {
return new Promise(resolve => {
if (preFetched[url]) {
resolve();

@@ -77,8 +79,8 @@ const prefetch = function(url) {
resolve();
preFetched[url] = true;
})
-.catch(() => {}); // 404s are logged to the console anyway
+.catch(() => {}); // 404s are logged to the console anyway.
});
-};
+}

export default prefetch;
@@ -8,8 +8,8 @@
import {matchRoutes} from 'react-router-config';

/**
-* Helper function to make sure all async component for that particular route
-* is preloaded before rendering. This is especially useful to avoid loading screens
+* Helper function to make sure all async components for that particular route
+* is preloaded before rendering. This is especially useful to avoid loading screens.
*
* @param {Array<RouteConfig>} routes react-router-config
* @param {string} pathname the route pathname, example: /docs/installation

@@ -17,12 +17,15 @@ import {matchRoutes} from 'react-router-config';
*/
export default function preload(routes, pathname) {
const matches = matchRoutes(routes, pathname);

return Promise.all(
matches.map(match => {
const {component} = match.route;

if (component && component.preload) {
return component.preload();
}

return undefined;
}),
);
@ -21,7 +21,7 @@ import preload from './preload';
|
||||||
import App from './App';
|
import App from './App';
|
||||||
import ssrTemplate from './templates/ssr.html.template';
|
import ssrTemplate from './templates/ssr.html.template';
|
||||||
|
|
||||||
// Renderer for static-site-generator-webpack-plugin (async rendering via promises)
|
// Renderer for static-site-generator-webpack-plugin (async rendering via promises).
|
||||||
export default async function render(locals) {
|
export default async function render(locals) {
|
||||||
const {routesLocation, headTags, preBodyTags, postBodyTags} = locals;
|
const {routesLocation, headTags, preBodyTags, postBodyTags} = locals;
|
||||||
const location = routesLocation[locals.path];
|
const location = routesLocation[locals.path];
|
||||||
|
@ -50,7 +50,8 @@ export default async function render(locals) {
|
||||||
const manifestPath = path.join(generatedFilesDir, 'client-manifest.json');
|
const manifestPath = path.join(generatedFilesDir, 'client-manifest.json');
|
||||||
const manifest = JSON.parse(await fs.readFile(manifestPath, 'utf8'));
|
const manifest = JSON.parse(await fs.readFile(manifestPath, 'utf8'));
|
||||||
|
|
||||||
// Get all required assets for this particular page based on client manifest information
|
// Get all required assets for this particular page based on client
|
||||||
|
// manifest information.
|
||||||
const modulesToBeLoaded = [...manifest.entrypoints, ...Array.from(modules)];
|
const modulesToBeLoaded = [...manifest.entrypoints, ...Array.from(modules)];
|
||||||
const bundles = getBundles(manifest, modulesToBeLoaded);
|
const bundles = getBundles(manifest, modulesToBeLoaded);
|
||||||
const stylesheets = (bundles.css || []).map(b => b.file);
|
const stylesheets = (bundles.css || []).map(b => b.file);
|
||||||
|
@ -76,7 +77,7 @@ export default async function render(locals) {
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
// minify html with https://github.com/DanielRuf/html-minifier-terser
|
// Minify html with https://github.com/DanielRuf/html-minifier-terser
|
||||||
return minify(renderedHtml, {
|
return minify(renderedHtml, {
|
||||||
removeComments: true,
|
removeComments: true,
|
||||||
removeRedundantAttributes: true,
|
removeRedundantAttributes: true,
|
||||||

@@ -67,7 +67,7 @@ export async function build(
    new CleanWebpackPlugin({verbose: false}),
    // Visualize size of webpack output files with an interactive zoomable treemap.
    cliOptions.bundleAnalyzer && new BundleAnalyzerPlugin(),
-    // Generate client manifests file that will be used for server bundle
+    // Generate client manifests file that will be used for server bundle.
    new ReactLoadableSSRAddon({
      filename: clientManifestPath,
    }),
@@ -90,17 +90,19 @@ export async function build(
    });
  }

-  // Plugin lifecycle - configureWebpack
+  // Plugin Lifecycle - configureWebpack.
  plugins.forEach(plugin => {
    const {configureWebpack} = plugin;
    if (!configureWebpack) {
      return;
    }

    clientConfig = applyConfigureWebpack(
      configureWebpack.bind(plugin), // The plugin lifecycle may reference `this`.
      clientConfig,
      false,
    );

    serverConfig = applyConfigureWebpack(
      configureWebpack.bind(plugin), // The plugin lifecycle may reference `this`.
      serverConfig,
@@ -108,7 +110,8 @@ export async function build(
    );
  });

-  // Make sure generated client-manifest is cleaned first so we don't reuse the one from prevs build
+  // Make sure generated client-manifest is cleaned first so we don't reuse
+  // the one from previous builds.
  if (fs.existsSync(clientManifestPath)) {
    fs.unlinkSync(clientManifestPath);
  }
@@ -116,7 +119,7 @@ export async function build(
  // Run webpack to build JS bundle (client) and static html files (server).
  await compile([clientConfig, serverConfig]);

-  // Remove server.bundle.js because it is useless
+  // Remove server.bundle.js because it is not needed.
  if (
    serverConfig.output &&
    serverConfig.output.filename &&
@@ -128,7 +131,7 @@ export async function build(
    });
  }

-  /* Plugin lifecycle - postBuild */
+  // Plugin Lifecycle - postBuild.
  await Promise.all(
    plugins.map(async plugin => {
      if (!plugin.postBuild) {
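
The configureWebpack lifecycle applied above lets each plugin return a partial webpack config that applyConfigureWebpack folds into the client and server configs. A hedged sketch of what such a plugin hook might return (the plugin name and alias are made-up examples, and the exact merge semantics are an assumption here, not something this commit specifies):

// Illustrative plugin sketch, not taken from this commit.
// The plugin name and the alias below are made up.
module.exports = function myWebpackTweakPlugin(context, options) {
  return {
    name: 'my-webpack-tweak-plugin',
    configureWebpack(config, isServer) {
      // The returned partial config is merged into the base webpack config.
      return {
        resolve: {
          alias: {'@site-images': './static/img'},
        },
      };
    },
  };
};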

@@ -27,7 +27,7 @@ export async function deploy(siteDir: string): Promise<void> {
    throw new Error(`Please set the GIT_USER`);
  }

-  // The branch that contains the latest docs changes that will be deployed
+  // The branch that contains the latest docs changes that will be deployed.
  const currentBranch =
    process.env.CURRENT_BRANCH ||
    shell.exec('git rev-parse --abbrev-ref HEAD').stdout.trim();
@@ -52,7 +52,7 @@ export async function deploy(siteDir: string): Promise<void> {
    );
  }

-  // We never deploy on pull request
+  // We never deploy on pull request.
  const isPullRequest =
    process.env.CI_PULL_REQUEST || process.env.CIRCLE_PULL_REQUEST;
  if (isPullRequest) {
@@ -72,7 +72,7 @@ export async function deploy(siteDir: string): Promise<void> {
    ? `git@${githubHost}:${organizationName}/${projectName}.git`
    : `https://${gitUser}@${githubHost}/${organizationName}/${projectName}.git`;

-  // Check if this is a cross-repo publish
+  // Check if this is a cross-repo publish.
  const currentRepoUrl = shell
    .exec('git config --get remote.origin.url')
    .stdout.trim();
@@ -80,21 +80,22 @@ export async function deploy(siteDir: string): Promise<void> {
    `${organizationName}/${projectName}.git`,
  );

-  // We don't allow deploying to the same branch unless it's a cross publish
+  // We don't allow deploying to the same branch unless it's a cross publish.
  if (currentBranch === deploymentBranch && !crossRepoPublish) {
    throw new Error(
      `Cannot deploy from a ${deploymentBranch} branch. Only to it`,
    );
  }

-  // Save the commit hash that triggers publish-gh-pages before checking out to deployment branch
+  // Save the commit hash that triggers publish-gh-pages before checking
+  // out to deployment branch.
  const currentCommit = shell.exec('git rev-parse HEAD').stdout.trim();

-  // Clear docusaurus 2 cache dir for deploy consistency
+  // Clear Docusaurus 2 cache dir for deploy consistency.
  const tempDir = path.join(siteDir, '.docusaurus');
  fs.removeSync(tempDir);

-  // build static html files, then push to deploymentBranch branch of specified repo
+  // Build static html files, then push to deploymentBranch branch of specified repo.
  build(siteDir)
    .then(() => {
      shell.cd(tempDir);
@@ -109,8 +110,9 @@ export async function deploy(siteDir: string): Promise<void> {

      shell.cd(`${projectName}-${deploymentBranch}`);

-      // If the default branch is the one we're deploying to, then we'll fail to create it.
-      // This is the case of a cross-repo publish, where we clone a github.io repo with a default master branch.
+      // If the default branch is the one we're deploying to, then we'll fail
+      // to create it. This is the case of a cross-repo publish, where we clone
+      // a github.io repo with a default master branch.
      const defaultBranch = shell
        .exec('git rev-parse --abbrev-ref HEAD')
        .stdout.trim();
@@ -164,8 +166,10 @@ export async function deploy(siteDir: string): Promise<void> {
        // The commit might return a non-zero value when site is up to date.
        const websiteURL =
          githubHost === 'github.com'
-            ? `https://${organizationName}.github.io/${projectName}` // gh-pages hosted repo
-            : `https://${githubHost}/pages/${organizationName}/${projectName}`; // GitHub enterprise hosting.
+            ? // gh-pages hosted repo
+              `https://${organizationName}.github.io/${projectName}`
+            : // GitHub enterprise hosting.
+              `https://${githubHost}/pages/${organizationName}/${projectName}`;
        shell.echo(`Website is live at: ${websiteURL}`);
        shell.exit(0);
      }
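
The final hunk above only reflows the URL ternary, but that expression decides where the deployed site ends up. A small sketch of the same decision in isolation (the example arguments are made up; the two URL schemes mirror the ones shown in the hunk):

// Same URL selection as in the hunk above, extracted for illustration only.
function getWebsiteURL(githubHost, organizationName, projectName) {
  return githubHost === 'github.com'
    ? // gh-pages hosted repo
      `https://${organizationName}.github.io/${projectName}`
    : // GitHub Enterprise hosting
      `https://${githubHost}/pages/${organizationName}/${projectName}`;
}

console.log(getWebsiteURL('github.com', 'facebook', 'docusaurus'));
// -> https://facebook.github.io/docusaurus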

@@ -14,12 +14,14 @@ export function externalCommand(cli: CommanderStatic, siteDir: string): void {
  const pluginConfigs = loadPluginConfigs(context);
  const plugins = initPlugins({pluginConfigs, context});

-  // Plugin lifecycle - extendCli
+  // Plugin Lifecycle - extendCli.
  plugins.forEach(plugin => {
    const {extendCli} = plugin;

    if (!extendCli) {
      return;
    }

    extendCli(cli);
  });
}

@@ -92,7 +92,7 @@ export async function start(
        __dirname,
        '../client/templates/index.html.template.ejs',
      ),
-      // so we can define the position where the scripts are injected
+      // So we can define the position where the scripts are injected.
      inject: false,
      filename: 'index.html',
      title: siteConfig.title,
@@ -100,12 +100,12 @@ export async function start(
      preBodyTags,
      postBodyTags,
    }),
-    // This is necessary to emit hot updates for webpack-dev-server
+    // This is necessary to emit hot updates for webpack-dev-server.
    new HotModuleReplacementPlugin(),
  ],
});

-  // Plugin lifecycle - configureWebpack
+  // Plugin Lifecycle - configureWebpack.
  plugins.forEach(plugin => {
    const {configureWebpack} = plugin;
    if (!configureWebpack) {
@@ -137,13 +137,13 @@ export async function start(
      rewrites: [{from: /\/*/, to: baseUrl}],
    },
    disableHostCheck: true,
-    // Disable overlay on browser since we use CRA's overlay error reporting
+    // Disable overlay on browser since we use CRA's overlay error reporting.
    overlay: false,
    host,
    before: (app, server) => {
      app.use(baseUrl, express.static(path.resolve(siteDir, STATIC_DIR_NAME)));

-      // This lets us fetch source contents from webpack for the error overlay
+      // This lets us fetch source contents from webpack for the error overlay.
      app.use(evalSourceMapMiddleware(server));
      // This lets us open files from the runtime error overlay.
      app.use(errorOverlayMiddleware());

@@ -29,7 +29,7 @@ export async function swizzle(
  fromPath = path.join(fromPath, componentName);
  toPath = path.join(toPath, componentName);

-  // Handle single js file only.
+  // Handle single JavaScript file only.
  // E.g: if <fromPath> does not exist, we try to swizzle <fromPath>.js instead
  if (!fs.existsSync(fromPath) && fs.existsSync(`${fromPath}.js`)) {
    [fromPath, toPath] = [`${fromPath}.js`, `${toPath}.js`];

@@ -53,10 +53,12 @@ export function loadConfig(siteDir: string): DocusaurusConfig {
  if (!fs.existsSync(configPath)) {
    throw new Error(`${CONFIG_FILE_NAME} not found`);
  }

  const loadedConfig = importFresh(configPath) as Partial<DocusaurusConfig>;
  const missingFields = REQUIRED_FIELDS.filter(
    field => !_.has(loadedConfig, field),
  );

  if (missingFields.length > 0) {
    throw new Error(
      `The required field(s) ${formatFields(
@@ -76,6 +78,7 @@ export function loadConfig(siteDir: string): DocusaurusConfig {
  const unrecognizedFields = Object.keys(config).filter(
    field => !allowedFields.includes(field),
  );

  if (unrecognizedFields && unrecognizedFields.length > 0) {
    throw new Error(
      `The field(s) ${formatFields(

@@ -44,6 +44,7 @@ export function loadHtmlTags(plugins: Plugin<any>[]): InjectedHtmlTags {
    },
    {headTags: '', preBodyTags: '', postBodyTags: ''},
  );

  return {
    headTags: htmlTags.headTags.trim(),
    preBodyTags: htmlTags.preBodyTags.trim(),

@@ -52,14 +52,14 @@ export function loadPluginConfigs(context: LoadContext): PluginConfig[] {
  return [
    ...presetPlugins,
    ...presetThemes,
-    // Site config should the highest priority.
+    // Site config should be the highest priority.
    ...(siteConfig.plugins || []),
    ...(siteConfig.themes || []),
  ];
}

export async function load(siteDir: string): Promise<Props> {
-  // Context
+  // Context.
  const context: LoadContext = loadContext(siteDir);
  const {generatedFilesDir, siteConfig, outDir, baseUrl} = context;
  const genSiteConfig = generate(
@@ -68,7 +68,7 @@ export async function load(siteDir: string): Promise<Props> {
    `export default ${JSON.stringify(siteConfig, null, 2)};`,
  );

-  // Plugins
+  // Plugins.
  const pluginConfigs: PluginConfig[] = loadPluginConfigs(context);
  const {plugins, pluginsRouteConfigs} = await loadPlugins({
    pluginConfigs,
@@ -83,7 +83,9 @@ export async function load(siteDir: string): Promise<Props> {
  const userTheme = path.resolve(siteDir, THEME_PATH);
  const alias = loadThemeAlias([fallbackTheme, ...pluginThemes, userTheme]);

-  // Make a fake plugin to resolve aliased theme components && inject scripts/stylesheets
+  // Make a fake plugin to:
+  // - Resolve aliased theme components
+  // - Inject scripts/stylesheets
  const {stylesheets = [], scripts = []} = siteConfig;
  plugins.push({
    name: 'docusaurus-bootstrap-plugin',
@@ -134,10 +136,10 @@ export async function load(siteDir: string): Promise<Props> {
      .join('\n')}\n];\n`,
  );

-  // Load extra head & body html tags
+  // Load extra head & body html tags.
  const {headTags, preBodyTags, postBodyTags} = loadHtmlTags(plugins);

-  // Routing
+  // Routing.
  const {
    registry,
    routesChunkNames,
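
As the "highest priority" comment above indicates, loadPluginConfigs places the site config's own plugins and themes after the preset-provided ones. A short sketch of the resulting order (the plugin and theme names are made-up examples):

// Illustrative only; the plugin and theme names are made up.
const presetPlugins = ['preset-docs-plugin', 'preset-blog-plugin'];
const presetThemes = ['preset-classic-theme'];
const siteConfig = {plugins: ['my-local-plugin'], themes: ['my-local-theme']};

const pluginConfigs = [
  ...presetPlugins,
  ...presetThemes,
  // Site config entries come last, i.e. with the highest priority.
  ...(siteConfig.plugins || []),
  ...(siteConfig.themes || []),
];
// -> ['preset-docs-plugin', 'preset-blog-plugin', 'preset-classic-theme',
//     'my-local-plugin', 'my-local-theme']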

@@ -9,7 +9,7 @@ import path from 'path';
import {load} from './index';
import {Props} from '@docusaurus/types';

-// Helper methods to setup dummy/fake projects
+// Helper methods to setup dummy/fake projects.
export const loadSetup = async (name: string): Promise<Props> => {
  const fixtures = path.join(__dirname, '__tests__', '__fixtures__');
  const simpleSite = path.join(fixtures, 'simple-site');

@@ -18,7 +18,8 @@ import {
import {initPlugins} from './init';

export function sortConfig(routeConfigs: RouteConfig[]) {
-  // Sort the route config. This ensures that route with nested routes is always placed last
+  // Sort the route config. This ensures that route with nested
+  // routes is always placed last.
  routeConfigs.sort((a, b) => {
    if (a.routes && !b.routes) {
      return 1;
@@ -26,15 +27,17 @@ export function sortConfig(routeConfigs: RouteConfig[]) {
    if (!a.routes && b.routes) {
      return -1;
    }
-    // Higher priority get placed first
+    // Higher priority get placed first.
    if (a.priority || b.priority) {
      const priorityA = a.priority || 0;
      const priorityB = b.priority || 0;
      const score = priorityA > priorityB ? -1 : priorityB > priorityA ? 1 : 0;

      if (score !== 0) {
        return score;
      }
    }

    return a.path > b.path ? 1 : b.path > a.path ? -1 : 0;
  });
}
@@ -49,24 +52,24 @@ export async function loadPlugins({
  plugins: Plugin<any>[];
  pluginsRouteConfigs: RouteConfig[];
}> {
-  // 1. Plugin Lifecycle - Initialization/Constructor
+  // 1. Plugin Lifecycle - Initialization/Constructor.
  const plugins: Plugin<any>[] = initPlugins({pluginConfigs, context});

-  // 2. Plugin lifecycle - loadContent
-  // Currently plugins run lifecycle in parallel and are not order-dependent. We could change
-  // this in future if there are plugins which need to run in certain order or depend on
-  // others for data.
+  // 2. Plugin Lifecycle - loadContent.
+  // Currently plugins run lifecycle methods in parallel and are not order-dependent.
+  // We could change this in future if there are plugins which need to
+  // run in certain order or depend on others for data.
  const pluginsLoadedContent = await Promise.all(
    plugins.map(async plugin => {
      if (!plugin.loadContent) {
        return null;
      }
-      const content = await plugin.loadContent();
-      return content;
+      return await plugin.loadContent();
    }),
  );

-  // 3. Plugin lifecycle - contentLoaded
+  // 3. Plugin Lifecycle - contentLoaded.
  const pluginsRouteConfigs: RouteConfig[] = [];

  await Promise.all(
@@ -97,7 +100,8 @@ export async function loadPlugins({
    }),
  );

-  // Sort the route config. This ensures that route with nested routes is always placed last
+  // Sort the route config. This ensures that route with nested
+  // routes are always placed last.
  sortConfig(pluginsRouteConfigs);

  return {
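
sortConfig above orders routes so that higher-priority entries come first and any route that carries nested routes (a catch-all layout) is pushed to the end. A small sketch of the resulting order (the route objects and the import path are made-up examples, not part of this commit):

// Illustrative only; the route configs below are made up.
import {sortConfig} from './plugins'; // assumed path to the function above

const routeConfigs = [
  {path: '/', component: 'Layout', routes: [{path: '/docs', component: 'Doc'}]},
  {path: '/blog', component: 'Blog'},
  {path: '/404.html', component: 'NotFound', priority: 1},
];

sortConfig(routeConfigs);

// Expected order after sorting:
// 1. '/404.html'  (higher priority wins among plain routes)
// 2. '/blog'      (plain route, ordered by path)
// 3. '/'          (has nested routes, so it is always placed last)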

@@ -20,6 +20,7 @@ export function initPlugins({
    pluginConfigs.map(pluginItem => {
      let pluginModuleImport;
      let pluginOptions = {};

      if (!pluginItem) {
        return null;
      }
@@ -35,7 +36,8 @@ export function initPlugins({
        return null;
      }

-      // module is any valid module identifier - npm package or locally-resolved path.
+      // The pluginModuleImport value is any valid
+      // module identifier - npm package or locally-resolved path.
      const pluginModule: any = importFresh(pluginModuleImport);
      return (pluginModule.default || pluginModule)(context, pluginOptions);
    }),

@@ -54,7 +54,7 @@ export async function loadRoutes(
    [routePath: string]: ChunkNames;
  } = {};

-  // This is the higher level overview of route code generation
+  // This is the higher level overview of route code generation.
  function generateRouteCode(routeConfig: RouteConfig): string {
    const {
      path: routePath,
@@ -94,7 +94,7 @@ export async function loadRoutes(
    if (isModule(value)) {
      const modulePath = getModulePath(value);
      const chunkName = genChunkName(modulePath, prefix, name);
-      // We need to JSON.stringify so that if its on windows, backslash are escaped.
+      // We need to JSON.stringify so that if its on windows, backslashes are escaped.
      const loader = `() => import(/* webpackChunkName: '${chunkName}' */ ${JSON.stringify(
        modulePath,
      )})`;
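
The JSON.stringify call above matters mostly on Windows, where module paths contain backslashes that would otherwise be dropped or misread in the generated import() expression. A tiny sketch of the difference (the path is a made-up example):

// Illustrative only; the Windows-style path is made up.
const modulePath = 'C:\\Users\\docs\\intro.md';

// Plain interpolation silently loses the backslashes when the generated
// string literal is later parsed as code:
console.log(`() => import('${modulePath}')`);
// () => import('C:\Users\docs\intro.md')

// JSON.stringify escapes the backslashes (and adds the quotes) correctly:
console.log(`() => import(${JSON.stringify(modulePath)})`);
// () => import("C:\\Users\\docs\\intro.md")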