Mirror of https://github.com/facebook/docusaurus.git

misc(v2): clean up comments in code (#2294)

parent d7f3dff6e6
commit 996b115199

45 changed files with 234 additions and 155 deletions

@@ -50,7 +50,7 @@ export async function init(
   let name = siteName;
 
-  // Prompt if siteName is not passed from CLI
+  // Prompt if siteName is not passed from CLI.
   if (!name) {
     const {name: promptedName} = await inquirer.prompt({
       type: 'input',
@@ -71,7 +71,7 @@ export async function init(
   }
 
   let template = reqTemplate;
-  // Prompt if template is not provided from CLI
+  // Prompt if template is not provided from CLI.
   if (!template) {
     const {template: promptedTemplate} = await inquirer.prompt({
       type: 'list',
@@ -82,7 +82,7 @@ export async function init(
     template = promptedTemplate;
   }
 
-  // If user choose Git repository, we'll prompt for the url
+  // If user choose Git repository, we'll prompt for the url.
   if (template === gitChoice) {
     const {gitRepoUrl} = await inquirer.prompt({
       type: 'input',
@@ -112,7 +112,7 @@ export async function init(
      throw new Error(chalk.red(`Cloning Git template: ${template} failed!`));
    }
  } else if (template && templates.includes(template)) {
-    // Docusaurus templates
+    // Docusaurus templates.
    try {
      await fs.copy(path.resolve(templatesDir, template), dest);
    } catch (err) {
@@ -125,7 +125,7 @@ export async function init(
    throw new Error('Invalid template');
  }
 
-  // Update package.json info
+  // Update package.json info.
  try {
    await updatePkg(path.join(dest, 'package.json'), {
      name: kebabCase(name),
@@ -137,7 +137,7 @@ export async function init(
    throw err;
  }
 
-  // We need to Rename the gitignore file to .gitignore
+  // We need to rename the gitignore file to .gitignore
  if (
    !fs.pathExistsSync(path.join(dest, '.gitignore')) &&
    fs.pathExistsSync(path.join(dest, 'gitignore'))

@@ -46,12 +46,13 @@ module.exports = async function(fileString) {
 
   let exportStr = `export const frontMatter = ${stringifyObject(data)};`;
 
-  // Read metadata for this MDX and export it
+  // Read metadata for this MDX and export it.
   if (options.metadataPath && typeof options.metadataPath === 'function') {
     const metadataPath = options.metadataPath(this.resourcePath);
 
     if (metadataPath) {
-      // Add as dependency of this loader result so that we can recompile if metadata is changed
+      // Add as dependency of this loader result so that we can
+      // recompile if metadata is changed.
       this.addDependency(metadataPath);
       const metadata = await readFile(metadataPath, 'utf8');
       exportStr += `\nexport const metadata = ${metadata};`;

@@ -29,12 +29,13 @@ function toValue(node) {
       default:
     }
   }
 
   return toString(node);
 }
+
 // Visit all headings. We `slug` all headings (to account for
 // duplicates), but only take h2 and h3 headings.
-const search = node => {
+function search(node) {
   const headings = [];
   let current = -1;
   let currentDepth = 0;
@@ -65,6 +66,6 @@ const search = node => {
   visit(node, 'heading', onHeading);
 
   return headings;
-};
+}
 
 module.exports = search;

@@ -18,7 +18,7 @@ export function truncate(fileString: string, truncateMarker: RegExp) {
 }
 
 // YYYY-MM-DD-{name}.mdx?
-// prefer named capture, but old Node version does not support.
+// Prefer named capture, but older Node versions do not support it.
 const FILENAME_PATTERN = /^(\d{4}-\d{1,2}-\d{1,2})-?(.*?).mdx?$/;
 
 function toUrl({date, link}: DateLink) {
@@ -111,15 +111,18 @@ export async function generateBlogPosts(
       // Extract date and title from filename.
       const match = blogFileName.match(FILENAME_PATTERN);
       let linkName = blogFileName.replace(/\.mdx?$/, '');
 
       if (match) {
         const [, dateString, name] = match;
         date = new Date(dateString);
         linkName = name;
       }
 
+      // Prefer user-defined date.
       if (frontMatter.date) {
         date = new Date(frontMatter.date);
       }
+
+      // Use file create time for blog.
       date = date || (await fs.stat(source)).birthtime;
       frontMatter.title = frontMatter.title || linkName;

@@ -41,7 +41,7 @@ const DEFAULT_OPTIONS: PluginOptions = {
   blogTagsPostsComponent: '@theme/BlogTagsPostsPage',
   remarkPlugins: [],
   rehypePlugins: [],
-  truncateMarker: /<!--\s*(truncate)\s*-->/, // Regex
+  truncateMarker: /<!--\s*(truncate)\s*-->/, // Regex.
 };
 
 function assertFeedTypes(val: any): asserts val is FeedType {
@@ -94,7 +94,7 @@ export default function pluginContentBlog(
         return null;
       }
 
-      // Colocate next and prev metadata
+      // Colocate next and prev metadata.
       blogPosts.forEach((blogPost, index) => {
         const prevItem = index > 0 ? blogPosts[index - 1] : null;
         if (prevItem) {
@@ -103,6 +103,7 @@ export default function pluginContentBlog(
             permalink: prevItem.metadata.permalink,
           };
         }
+
         const nextItem =
           index < blogPosts.length - 1 ? blogPosts[index + 1] : null;
         if (nextItem) {
@@ -168,7 +169,8 @@ export default function pluginContentBlog(
         const permalink = normalizeUrl([tagsPath, normalizedTag]);
         if (!blogTags[normalizedTag]) {
           blogTags[normalizedTag] = {
-            name: tag.toLowerCase(), // Will only use the name of the first occurrence of the tag.
+            // Will only use the name of the first occurrence of the tag.
+            name: tag.toLowerCase(),
             items: [],
             permalink,
           };
@@ -232,7 +234,8 @@ export default function pluginContentBlog(
         blogPosts.map(async blogPost => {
           const {id, metadata} = blogPost;
           await createData(
-            // Note that this created data path must be in sync with metadataPath provided to mdx-loader
+            // Note that this created data path must be in sync with
+            // metadataPath provided to mdx-loader.
             `${docuHash(metadata.source)}.json`,
             JSON.stringify(metadata, null, 2),
           );
@@ -374,7 +377,8 @@ export default function pluginContentBlog(
           options: {
             remarkPlugins,
             rehypePlugins,
-            // Note that metadataPath must be the same/ in-sync as the path from createData for each MDX
+            // Note that metadataPath must be the same/in-sync as
+            // the path from createData for each MDX.
             metadataPath: (mdxPath: string) => {
               const aliasedSource = aliasedSitePath(mdxPath, siteDir);
               return path.join(

@@ -16,10 +16,11 @@ export = function(fileString: string) {
 
   let finalContent = fileString;
 
-  // Truncate content if requested (e.g: file.md?truncated=true)
+  // Truncate content if requested (e.g: file.md?truncated=true).
   const {truncated} = this.resourceQuery && parseQuery(this.resourceQuery);
   if (truncated) {
     finalContent = truncate(fileString, truncateMarker);
   }
+
   return callback && callback(null, finalContent);
 } as loader.Loader;

@@ -71,7 +71,7 @@ export default function pluginContentDocs(
     'docusaurus-plugin-content-docs',
   );
 
-  // Versioning
+  // Versioning.
   const env = loadEnv(siteDir);
   const {versioning} = env;
   const {
@@ -128,7 +128,7 @@ export default function pluginContentDocs(
       const docsMetadataRaw: DocsMetadataRaw = {};
       const docsPromises = [];
 
-      // Metadata for default/ master docs files.
+      // Metadata for default/master docs files.
       const docsFiles = await globby(include, {
         cwd: docsDir,
       });
@@ -147,7 +147,7 @@ export default function pluginContentDocs(
         ),
       );
 
-      // Metadata for versioned docs
+      // Metadata for versioned docs.
       if (versioning.enabled) {
         const versionedGlob = _.flatten(
           include.map(pattern =>
@@ -173,7 +173,7 @@ export default function pluginContentDocs(
         );
       }
 
-      // Load the sidebars & create docs ordering
+      // Load the sidebars and create docs ordering.
       const sidebarPaths = [
         sidebarPath,
         ...versionsNames.map(
@@ -185,7 +185,7 @@ export default function pluginContentDocs(
 
       await Promise.all(docsPromises);
 
-      // Construct inter-metadata relationship in docsMetadata
+      // Construct inter-metadata relationship in docsMetadata.
       const docsMetadata: DocsMetadata = {};
       const permalinkToSidebar: PermalinkToSidebar = {};
       const versionToSidebars: VersionToSidebars = {};
@@ -211,7 +211,7 @@ export default function pluginContentDocs(
          next,
        };
 
-        // sourceToPermalink and permalinkToSidebar mapping
+        // sourceToPermalink and permalinkToSidebar mapping.
        const {source, permalink, version} = docsMetadataRaw[currentID];
        sourceToPermalink[source] = permalink;
        if (sidebar) {
@@ -255,8 +255,9 @@ export default function pluginContentDocs(
        }
      };
 
-      // Transform the sidebar so that all sidebar item will be in the form of 'link' or 'category' only
-      // This is what will be passed as props to the UI component
+      // Transform the sidebar so that all sidebar item will be in the
+      // form of 'link' or 'category' only.
+      // This is what will be passed as props to the UI component.
      const docsSidebars: DocsSidebar = Object.entries(loadedSidebars).reduce(
        (acc: DocsSidebar, [sidebarId, sidebarItems]) => {
          acc[sidebarId] = sidebarItems.map(normalizeItem);
@@ -290,10 +291,12 @@ export default function pluginContentDocs(
      const routes = await Promise.all(
        metadataItems.map(async metadataItem => {
          await createData(
-            // Note that this created data path must be in sync with metadataPath provided to mdx-loader
+            // Note that this created data path must be in sync with
+            // metadataPath provided to mdx-loader.
            `${docuHash(metadataItem.source)}.json`,
            JSON.stringify(metadataItem, null, 2),
          );
+
          return {
            path: metadataItem.permalink,
            component: docItemComponent,
@@ -304,6 +307,7 @@ export default function pluginContentDocs(
          };
        }),
      );
+
      return routes.sort((a, b) =>
        a.path > b.path ? 1 : b.path > a.path ? -1 : 0,
      );
@@ -331,8 +335,8 @@ export default function pluginContentDocs(
        });
      };
 
-      // If versioning is enabled, we cleverly chunk the generated routes to be by version
-      // and pick only needed base metadata
+      // If versioning is enabled, we cleverly chunk the generated routes
+      // to be by version and pick only needed base metadata.
      if (versioning.enabled) {
        const docsMetadataByVersion = _.groupBy(
          Object.values(content.docsMetadata),
@@ -365,8 +369,9 @@ export default function pluginContentDocs(
            version,
          };
 
-          // We want latest version route config to be placed last in the generated routeconfig.
-          // Otherwise, `/docs/next/foo` will match `/docs/:route` instead of `/docs/next/:route`
+          // We want latest version route config to be placed last in the
+          // generated routeconfig. Otherwise, `/docs/next/foo` will match
+          // `/docs/:route` instead of `/docs/next/:route`.
          return addBaseRoute(
            docsBaseRoute,
            docsBaseMetadata,
@@ -410,7 +415,8 @@ export default function pluginContentDocs(
          remarkPlugins,
          rehypePlugins,
          metadataPath: (mdxPath: string) => {
-            // Note that metadataPath must be the same/ in-sync as the path from createData for each MDX
+            // Note that metadataPath must be the same/in-sync as
+            // the path from createData for each MDX.
            const aliasedSource = aliasedSitePath(mdxPath, siteDir);
            return path.join(
              dataDir,

@@ -31,7 +31,8 @@ export default async function getFileLastUpdate(
       : {timestamp: +temp[1], author: temp[2]};
   }
 
-  // Wrap in try/catch in case the shell commands fail (e.g. project doesn't use Git, etc).
+  // Wrap in try/catch in case the shell commands fail
+  // (e.g. project doesn't use Git, etc).
   try {
     if (!shell.which('git')) {
       if (!showedGitRequirementError) {

@@ -64,6 +64,7 @@ export default function(
       }
       return modifiedLine;
     });
+
     content = lines.join('\n');
   }

@@ -32,7 +32,7 @@ async function lastUpdated(
 ): Promise<LastUpdateData> {
   const {showLastUpdateAuthor, showLastUpdateTime} = options;
   if (showLastUpdateAuthor || showLastUpdateTime) {
-    // Use fake data in dev for faster development
+    // Use fake data in dev for faster development.
     const fileLastUpdateData =
       process.env.NODE_ENV === 'production'
         ? await lastUpdate(filePath)
@@ -49,6 +49,7 @@ async function lastUpdated(
       };
     }
   }
+
   return {};
 }
 
@@ -83,7 +84,7 @@ export default async function processMetadata({
     }
   }
 
-  // The version portion of the url path. Eg: 'next', '1.0.0', and ''
+  // The version portion of the url path. Eg: 'next', '1.0.0', and ''.
   const versionPath =
     version && version !== versioning.latestVersion ? version : '';
 
@@ -112,7 +113,7 @@ export default async function processMetadata({
 
   const description: string = frontMatter.description || excerpt;
 
-  // The last portion of the url path. Eg: 'foo/bar', 'bar'
+  // The last portion of the url path. Eg: 'foo/bar', 'bar'.
   const routePath =
     version && version !== 'next'
       ? id.replace(new RegExp(`^version-${version}/`), '')
@@ -126,8 +127,10 @@ export default async function processMetadata({
 
   const {lastUpdatedAt, lastUpdatedBy} = await lastUpdatedPromise;
 
-  // Assign all of object properties during instantiation (if possible) for NodeJS optimization
-  // Adding properties to object after instantiation will cause hidden class transitions.
+  // Assign all of object properties during instantiation (if possible) for
+  // NodeJS optimization.
+  // Adding properties to object after instantiation will cause hidden
+  // class transitions.
   const metadata: MetadataRaw = {
     id,
     title,

@@ -18,7 +18,7 @@ import {
 } from './types';
 
 /**
- * Check that item contains only allowed keys
+ * Check that item contains only allowed keys.
  */
 function assertItem(item: Object, keys: string[]): void {
   const unknownKeys = Object.keys(item).filter(
@@ -72,8 +72,8 @@ function assertIsLink(item: any): asserts item is SidebarItemLink {
 }
 
 /**
- * Normalizes recursively item and all its children. Ensures, that at the end
- * each item will be an object with the corresponding type
+ * Normalizes recursively item and all its children. Ensures that at the end
+ * each item will be an object with the corresponding type.
  */
 function normalizeItem(item: SidebarItemRaw): SidebarItem {
   if (typeof item === 'string') {
@@ -102,7 +102,7 @@ function normalizeItem(item: SidebarItemRaw): SidebarItem {
 }
 
 /**
- * Converts sidebars object to mapping to arrays of sidebar item objects
+ * Converts sidebars object to mapping to arrays of sidebar item objects.
  */
 function normalizeSidebar(sidebars: SidebarRaw): Sidebar {
   return Object.entries(sidebars).reduce(
@@ -110,7 +110,7 @@ function normalizeSidebar(sidebars: SidebarRaw): Sidebar {
       let normalizedSidebar: SidebarItemRaw[];
 
       if (!Array.isArray(sidebar)) {
-        // convert sidebar to a more generic structure
+        // Convert sidebar to a more generic structure.
        normalizedSidebar = Object.entries(sidebar).map(([label, items]) => ({
          type: 'category',
          label,
@@ -129,11 +129,13 @@ function normalizeSidebar(sidebars: SidebarRaw): Sidebar {
 }
 
 export default function loadSidebars(sidebarPaths?: string[]): Sidebar {
-  // We don't want sidebars to be cached because of hotreloading.
+  // We don't want sidebars to be cached because of hot reloading.
   let allSidebars: SidebarRaw = {};
+
   if (!sidebarPaths || !sidebarPaths.length) {
     return {} as Sidebar;
   }
+
   sidebarPaths.map(sidebarPath => {
     if (sidebarPath && fs.existsSync(sidebarPath)) {
       const sidebar = importFresh(sidebarPath) as SidebarRaw;

@@ -25,18 +25,21 @@ export function docsVersion(
       'No version tag specified!. Pass the version you wish to create as an argument. Ex: 1.0.0',
     );
   }
+
   if (version.includes('/') || version.includes('\\')) {
     throw new Error(
       `Invalid version tag specified! Do not include slash (/) or (\\). Try something like: 1.0.0`,
     );
   }
+
   if (version.length > 32) {
     throw new Error(
       'Invalid version tag specified! Length must <= 32 characters. Try something like: 1.0.0',
     );
   }
 
-  // Since we are going to create `version-${version}` folder, we need to make sure its a valid path name
+  // Since we are going to create `version-${version}` folder, we need to make
+  // sure it's a valid pathname.
   if (/[<>:"\/\\|?*\x00-\x1F]/g.test(version)) {
     throw new Error(
       'Invalid version tag specified! Please ensure its a valid pathname too. Try something like: 1.0.0',
@@ -49,14 +52,14 @@ export function docsVersion(
     );
   }
 
-  // Load existing versions
+  // Load existing versions.
   let versions = [];
   const versionsJSONFile = getVersionsJSONFile(siteDir);
   if (fs.existsSync(versionsJSONFile)) {
     versions = JSON.parse(fs.readFileSync(versionsJSONFile, 'utf8'));
   }
 
-  // Check if version already exist
+  // Check if version already exists.
   if (versions.includes(version)) {
     throw new Error(
       'This version already exists!. Use a version tag that does not already exist.',
@@ -65,7 +68,7 @@ export function docsVersion(
 
   const {path: docsPath, sidebarPath} = options;
 
-  // Copy docs files
+  // Copy docs files.
   const docsDir = path.join(siteDir, docsPath);
   if (fs.existsSync(docsDir) && fs.readdirSync(docsDir).length > 0) {
     const versionedDir = getVersionedDocsDir(siteDir);
@@ -75,11 +78,11 @@ export function docsVersion(
     throw new Error('There is no docs to version !');
   }
 
-  // Load current sidebar and create a new versioned sidebars file
+  // Load current sidebar and create a new versioned sidebars file.
   if (fs.existsSync(sidebarPath)) {
     const loadedSidebars: Sidebar = loadSidebars([sidebarPath]);
 
-    // Transform id in original sidebar to versioned id
+    // Transform id in original sidebar to versioned id.
     const normalizeItem = (item: SidebarItem): SidebarItem => {
       switch (item.type) {
         case 'category':
@@ -117,7 +120,7 @@ export function docsVersion(
     );
   }
 
-  // update versions.json file
+  // Update versions.json file.
   versions.unshift(version);
   fs.ensureDirSync(path.dirname(versionsJSONFile));
   fs.writeFileSync(versionsJSONFile, `${JSON.stringify(versions, null, 2)}\n`);

@@ -15,7 +15,8 @@ export default (function() {
       // Set page so that subsequent hits on this page are attributed
       // to this page. This is recommended for Single-page Applications.
       window.ga('set', 'page', location.pathname);
-      // Always refer to the variable on window in-case it gets overridden elsewhere.
+      // Always refer to the variable on window in-case it gets
+      // overridden elsewhere.
       window.ga('send', 'pageview');
     },
   };

@@ -26,7 +26,9 @@ module.exports = function(context) {
       'Please ensure this is not a mistake.',
     );
   }
 
+  const isProd = process.env.NODE_ENV === 'production';
+
   return {
     name: 'docusaurus-plugin-google-analytics',

@@ -28,6 +28,7 @@ module.exports = function(context) {
   }
 
   const isProd = process.env.NODE_ENV === 'production';
+
   return {
     name: 'docusaurus-plugin-google-gtag',

@@ -12,6 +12,7 @@ import path from 'path';
 
 export default function(_context: LoadContext, options: PluginOptions) {
   const isProd = process.env.NODE_ENV === 'production';
+
   return {
     name: 'docusaurus-plugin-ideal-image',

@@ -12,7 +12,7 @@ import createSitemap from './createSitemap';
 import {LoadContext, Props} from '@docusaurus/types';
 
 const DEFAULT_OPTIONS: PluginOptions = {
-  cacheTime: 600 * 1000, // 600 sec - cache purge period
+  cacheTime: 600 * 1000, // 600 sec - cache purge period.
   changefreq: 'weekly',
   priority: 0.5,
 };
@@ -27,14 +27,14 @@ export default function pluginSitemap(
     name: 'docusaurus-plugin-sitemap',
 
     async postBuild({siteConfig, routesPaths, outDir}: Props) {
-      // Generate sitemap
+      // Generate sitemap.
       const generatedSitemap = createSitemap(
         siteConfig,
         routesPaths,
         options,
       ).toString();
 
-      // Write sitemap file
+      // Write sitemap file.
       const sitemapPath = path.join(outDir, 'sitemap.xml');
       fs.writeFile(sitemapPath, generatedSitemap, err => {
         if (err) {

@@ -12,15 +12,18 @@ const addAdmonitions = pluginOptions => {
       remarkPlugins: [admonitions],
     };
   }
+
   if (pluginOptions.admonitions === false) {
     return pluginOptions;
   }
+
   const admonitionsOptions = {
     remarkPlugins: (pluginOptions.remarkPlugins || []).concat([
       admonitions,
       pluginOptions.admonitions || {},
     ]),
   };
+
   return {
     ...pluginOptions,
     ...admonitionsOptions,
@@ -39,7 +42,7 @@ module.exports = function preset(context, opts = {}) {
   return {
     themes: [
       ['@docusaurus/theme-classic', opts.theme],
-      // Don't add this if algolia config is not defined
+      // Don't add this if algolia config is not defined.
       algolia && '@docusaurus/theme-search-algolia',
     ],
     plugins: [

@@ -11,7 +11,7 @@ import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
 import useBaseUrl from '@docusaurus/useBaseUrl';
 import useLockBodyScroll from '@theme/hooks/useLockBodyScroll';
 import Link from '@docusaurus/Link';
-import isInternalUrl from '@docusaurus/utils'; // eslint-disable-line import/no-extraneous-dependencies
+import isInternalUrl from '@docusaurus/isInternalUrl';
 
 import styles from './styles.module.css';

@@ -5,4 +5,4 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-export {default} from '@docusaurus/Noop';
+export {default} from '@docusaurus/noop';

@@ -19,7 +19,8 @@ module.exports = function() {
   return {
     resolve: {
       alias: {
-        // fork of Buble which removes Buble's large dependency and weighs in at a smaller size of ~51kB
+        // fork of Buble which removes Buble's large dependency and weighs in
+        // at a smaller size of ~51kB
         // https://github.com/FormidableLabs/react-live#what-bundle-size-can-i-expect
         buble: '@philpl/buble',
       },

@@ -16,7 +16,7 @@ module.exports = function() {
     },
 
     configureWebpack() {
-      // Ensure that algolia docsearch css is its own chunk
+      // Ensure that algolia docsearch styles is its own chunk.
       return {
         optimization: {
           splitChunks: {
@@ -26,7 +26,7 @@ module.exports = function() {
               test: /algolia\.css$/,
               chunks: `all`,
               enforce: true,
-              // Set priority higher than docusaurus single-css extraction
+              // Set priority higher than docusaurus single-css extraction.
               priority: 60,
             },
           },

@@ -33,12 +33,13 @@ const Search = props => {
       // navigation and avoiding a full page refresh.
       handleSelected: (_input, _event, suggestion) => {
         // Use an anchor tag to parse the absolute url into a relative url
-        // Alternatively, we can use new URL(suggestion.url) but its not supported in IE
+        // Alternatively, we can use new URL(suggestion.url) but it's not supported in IE.
         const a = document.createElement('a');
         a.href = suggestion.url;
 
-        // Algolia use closest parent element id #__docusaurus when a h1 page title does not have an id
-        // So, we can safely remove it. See https://github.com/facebook/docusaurus/issues/1828 for more details.
+        // Algolia use closest parent element id #__docusaurus when a h1 page title does
+        // not have an id, so we can safely remove it.
+        // See https://github.com/facebook/docusaurus/issues/1828 for more details.
         const routePath =
           `#__docusaurus` === a.hash
             ? `${a.pathname}`

@@ -29,9 +29,9 @@ export async function generate(
 
   let lastHash = fileHash.get(filepath);
 
-  // If file already exist but its not in runtime cache hash yet,
+  // If file already exists but its not in runtime cache yet,
   // we try to calculate the content hash and then compare
-  // This is to avoid unnecessary overwrite and we can reuse old file
+  // This is to avoid unnecessary overwriting and we can reuse old file.
   if (!lastHash && fs.existsSync(filepath)) {
     const lastContent = await fs.readFile(filepath, 'utf8');
     lastHash = createHash('md5')
@@ -64,7 +64,8 @@ const indexRE = /(^|.*\/)index\.(md|js|jsx|ts|tsx)$/i;
 const extRE = /\.(md|js)$/;
 
 /**
- * Convert filepath to url path. Example: 'index.md' -> '/', 'foo/bar.js' -> '/foo/bar',
+ * Convert filepath to url path.
+ * Example: 'index.md' -> '/', 'foo/bar.js' -> '/foo/bar',
  */
 export function fileToPath(file: string): string {
   if (indexRE.test(file)) {
@@ -81,7 +82,8 @@ export function encodePath(userpath: string): string {
 }
 
 /**
- * Given an input string, convert to kebab-case and append a hash. Avoid str collision
+ * Given an input string, convert to kebab-case and append a hash.
+ * Avoid str collision.
  */
 export function docuHash(str: string): string {
   if (str === '/') {
@@ -95,7 +97,8 @@ export function docuHash(str: string): string {
 }
 
 /**
- * Generate unique React Component Name. E.g: /foo-bar -> FooBar096
+ * Generate unique React Component Name.
+ * E.g: /foo-bar -> FooBar096
  */
 export function genComponentName(pagePath: string): string {
   if (pagePath === '/') {
@@ -107,7 +110,8 @@ export function genComponentName(pagePath: string): string {
 }
 
 /**
- * Convert Windows backslash paths to posix style paths. E.g: endi\\lie -> endi/lie
+ * Convert Windows backslash paths to posix style paths.
+ * E.g: endi\\lie -> endi/lie
  */
 export function posixPath(str: string): string {
   const isExtendedLengthPath = /^\\\\\?\\/.test(str);
@@ -121,7 +125,7 @@ export function posixPath(str: string): string {
 
 const chunkNameCache = new Map();
 /**
- * Generate unique chunk name given a module path
+ * Generate unique chunk name given a module path.
 */
 export function genChunkName(
   modulePath: string,
@@ -164,7 +168,7 @@ export function idx(target: any, keyPaths?: string | (string | number)[]): any {
 }
 
 /**
- * Given a filepath and dirpath, get the first directory
+ * Given a filepath and dirpath, get the first directory.
 */
 export function getSubFolder(file: string, refDir: string): string | null {
   const separator = escapeStringRegexp(path.sep);
@@ -193,6 +197,7 @@ export function parse(
         .shift();
     },
   };
+
   const {data: frontMatter, content, excerpt} = matter(fileString, options);
   return {frontMatter, content, excerpt};
 }
@@ -238,27 +243,30 @@ export function normalizeUrl(rawUrls: string[]): string {
   }
 
   let str = resultArray.join('/');
-  // Each input component is now separated by a single slash except the possible first plain protocol part.
+  // Each input component is now separated by a single slash
+  // except the possible first plain protocol part.
 
-  // remove trailing slash before parameters or hash
+  // Remove trailing slash before parameters or hash.
   str = str.replace(/\/(\?|&|#[^!])/g, '$1');
 
-  // replace ? in parameters with &
+  // Replace ? in parameters with &.
   const parts = str.split('?');
   str = parts.shift() + (parts.length > 0 ? '?' : '') + parts.join('&');
 
-  // dedupe forward slashes
+  // Dedupe forward slashes.
   str = str.replace(/^\/+/, '/');
 
   return str;
 }
 
 /**
- * Alias filepath relative to site directory, very useful so that we don't expose user's site structure.
+ * Alias filepath relative to site directory, very useful so that we
+ * don't expose user's site structure.
  * Example: some/path/to/website/docs/foo.md -> @site/docs/foo.md
 */
 export function aliasedSitePath(filePath: string, siteDir: string) {
   const relativePath = path.relative(siteDir, filePath);
-  // Cannot use path.join() as it resolves '../' and removes the '@site'. Let webpack loader resolve it.
+  // Cannot use path.join() as it resolves '../' and removes
+  // the '@site'. Let webpack loader resolve it.
   return `@site/${relativePath}`;
 }

@@ -15,7 +15,7 @@ const fetched = {};
 const loaded = {};
 
 const isSlowConnection = () => {
-  // if user is on slow or constrained connection
+  // If user is on slow or constrained connection.
   if (`connection` in navigator) {
     if (
       (navigator.connection.effectiveType || ``).indexOf(`2g`) !== -1 &&
@@ -37,10 +37,10 @@ const docusaurus = {
     if (!canPrefetch(routePath)) {
       return false;
     }
-    // prevent future duplicate prefetch of routePath
+    // Prevent future duplicate prefetch of routePath.
     fetched[routePath] = true;
 
-    // Find all webpack chunk names needed
+    // Find all webpack chunk names needed.
     const matches = matchRoutes(routes, routePath);
     const chunkNamesNeeded = matches.reduce((arr, match) => {
       const chunk = routesChunkNames[match.route.path];
@@ -52,25 +52,28 @@ const docusaurus = {
       return arr.concat(chunkNames);
     }, []);
 
-    // Prefetch all webpack chunk assets file needed
+    // Prefetch all webpack chunk assets file needed.
     chunkNamesNeeded.forEach(chunkName => {
-      // "__webpack_require__.gca" is a custom function provided by ChunkAssetPlugin
-      // Pass it the chunkName or chunkId you want to load and it will return the URL for that chunk
+      // "__webpack_require__.gca" is a custom function provided by ChunkAssetPlugin.
+      // Pass it the chunkName or chunkId you want to load and it will return the URL for that chunk.
       // eslint-disable-next-line no-undef
       const chunkAsset = __webpack_require__.gca(chunkName);
+
-      // In some cases, webpack might decide to optimize further & hence the chunk asssets are merged to another chunk/previous chunk
-      // Hence, we can safely filter it out/ dont need to load it
+      // In some cases, webpack might decide to optimize further & hence the chunk assets are merged to another chunk/previous chunk.
+      // Hence, we can safely filter it out/don't need to load it.
       if (chunkAsset && !/undefined/.test(chunkAsset)) {
         prefetchHelper(chunkAsset);
       }
     });
+
     return true;
   },
 
   preload: routePath => {
     if (!canPreload(routePath)) {
       return false;
     }
+
     loaded[routePath] = true;
     preloadHelper(routes, routePath);
     return true;

@@ -30,10 +30,10 @@ function ComponentCreator(path) {
   https://github.com/jamiebuilds/react-loadable#declaring-which-modules-are-being-loaded
   Example:
   - optsLoader:
-      {
-        component: () => import('./Pages.js'),
-        content.foo: () => import('./doc1.md'),
-      }
+    {
+      component: () => import('./Pages.js'),
+      content.foo: () => import('./doc1.md'),
+    }
   - optsModules: ['./Pages.js', './doc1.md']
   - optsWebpack: [require.resolveWeak('./Pages.js'), require.resolveWeak('./doc1.md')]
   */
@@ -55,7 +55,7 @@ function ComponentCreator(path) {
       modules: optsModules,
       webpack: () => optsWebpack,
       render: (loaded, props) => {
-        // clone the original object since we don't want to alter the original.
+        // Clone the original object since we don't want to alter the original.
         const loadedModules = JSON.parse(JSON.stringify(chunkNames));
         Object.keys(loaded).forEach(key => {
           let val = loadedModules;

@@ -7,7 +7,7 @@
 
 import React, {useEffect, useRef} from 'react';
 import {NavLink} from 'react-router-dom';
-import isInternalUrl from '@docusaurus/utils';
+import isInternalUrl from '@docusaurus/isInternalUrl';
 
 function Link(props) {
   const {to, href} = props;
@@ -23,7 +23,7 @@ function Link(props) {
     io = new window.IntersectionObserver(entries => {
       entries.forEach(entry => {
         if (el === entry.target) {
-          // If element is in viewport, stop listening/observing & run callback.
+          // If element is in viewport, stop listening/observing and run callback.
           // https://developer.mozilla.org/en-US/docs/Web/API/Intersection_Observer_API
           if (entry.isIntersecting || entry.intersectionRatio > 0) {
             io.unobserve(el);
@@ -33,13 +33,14 @@ function Link(props) {
         }
       });
     });
-    // Add element to the observer
+
+    // Add element to the observer.
     io.observe(el);
   };
 
   const handleRef = ref => {
     if (IOSupported && ref && isInternal) {
-      // If IO supported and element reference found, setup Observer functionality
+      // If IO supported and element reference found, setup Observer functionality.
       handleIntersection(ref, () => {
         window.docusaurus.prefetch(targetLink);
       });
@@ -54,11 +55,12 @@ function Link(props) {
   };
 
   useEffect(() => {
-    // If IO is not supported. We prefetch by default (only once)
+    // If IO is not supported. We prefetch by default (only once).
     if (!IOSupported && isInternal) {
       window.docusaurus.prefetch(targetLink);
     }
-    // when unmount, stops intersection observer from watching
+
+    // When unmounting, stop intersection observer from watching.
     return () => {
       if (IOSupported && io) {
         io.disconnect();

@@ -19,8 +19,10 @@ export default function useBaseUrl(url) {
   if (externalRegex.test(url)) {
     return url;
   }
+
   if (url.startsWith('/')) {
     return baseUrl + url.slice(1);
   }
+
   return baseUrl + url;
 }

@@ -23,6 +23,7 @@ function flat(target) {
       output[newKey] = value;
     });
   }
+
   step(target);
   return output;
 }

@@ -5,10 +5,11 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-const support = function(feature) {
+function support(feature) {
   if (typeof document === 'undefined') {
     return false;
   }
+
   const fakeLink = document.createElement('link');
   try {
     if (fakeLink.relList && typeof fakeLink.relList.supports === 'function') {
@@ -17,10 +18,11 @@ const support = function(feature) {
   } catch (err) {
     return false;
   }
-  return false;
-};
+
+  return false;
+}
 
-const linkPrefetchStrategy = function(url) {
+function linkPrefetchStrategy(url) {
   return new Promise((resolve, reject) => {
     if (typeof document === 'undefined') {
       reject();
@@ -39,9 +41,9 @@ const linkPrefetchStrategy = function(url) {
       document.getElementsByName('script')[0].parentNode;
     parentElement.appendChild(link);
   });
-};
+}
 
-const xhrPrefetchStrategy = function(url) {
+function xhrPrefetchStrategy(url) {
   return new Promise((resolve, reject) => {
     const req = new XMLHttpRequest();
     req.open('GET', url, true);
@@ -57,7 +59,7 @@ const xhrPrefetchStrategy = function(url) {
 
     req.send(null);
   });
-};
+}
 
 const supportedPrefetchStrategy = support('prefetch')
   ? linkPrefetchStrategy
@@ -65,7 +67,7 @@ const supportedPrefetchStrategy = support('prefetch')
 
 const preFetched = {};
 
-const prefetch = function(url) {
+function prefetch(url) {
   return new Promise(resolve => {
     if (preFetched[url]) {
       resolve();
@@ -77,8 +79,8 @@ const prefetch = function(url) {
         resolve();
         preFetched[url] = true;
       })
-      .catch(() => {}); // 404s are logged to the console anyway
+      .catch(() => {}); // 404s are logged to the console anyway.
   });
-};
+}
 
 export default prefetch;

@@ -8,8 +8,8 @@
 import {matchRoutes} from 'react-router-config';
 
 /**
- * Helper function to make sure all async component for that particular route
- * is preloaded before rendering. This is especially useful to avoid loading screens
+ * Helper function to make sure all async components for that particular route
+ * is preloaded before rendering. This is especially useful to avoid loading screens.
  *
 * @param {Array<RouteConfig>} routes react-router-config
 * @param {string} pathname the route pathname, example: /docs/installation
@@ -17,12 +17,15 @@ import {matchRoutes} from 'react-router-config';
 */
 export default function preload(routes, pathname) {
   const matches = matchRoutes(routes, pathname);
+
   return Promise.all(
     matches.map(match => {
       const {component} = match.route;
+
       if (component && component.preload) {
         return component.preload();
       }
+
       return undefined;
     }),
   );

@@ -21,7 +21,7 @@ import preload from './preload';
 import App from './App';
 import ssrTemplate from './templates/ssr.html.template';
 
-// Renderer for static-site-generator-webpack-plugin (async rendering via promises)
+// Renderer for static-site-generator-webpack-plugin (async rendering via promises).
 export default async function render(locals) {
   const {routesLocation, headTags, preBodyTags, postBodyTags} = locals;
   const location = routesLocation[locals.path];
@@ -50,7 +50,8 @@ export default async function render(locals) {
   const manifestPath = path.join(generatedFilesDir, 'client-manifest.json');
   const manifest = JSON.parse(await fs.readFile(manifestPath, 'utf8'));
 
-  // Get all required assets for this particular page based on client manifest information
+  // Get all required assets for this particular page based on client
+  // manifest information.
   const modulesToBeLoaded = [...manifest.entrypoints, ...Array.from(modules)];
   const bundles = getBundles(manifest, modulesToBeLoaded);
   const stylesheets = (bundles.css || []).map(b => b.file);
@@ -76,7 +77,7 @@ export default async function render(locals) {
     },
   );
 
-  // minify html with https://github.com/DanielRuf/html-minifier-terser
+  // Minify html with https://github.com/DanielRuf/html-minifier-terser
   return minify(renderedHtml, {
     removeComments: true,
     removeRedundantAttributes: true,

@@ -67,7 +67,7 @@ export async function build(
       new CleanWebpackPlugin({verbose: false}),
       // Visualize size of webpack output files with an interactive zoomable treemap.
       cliOptions.bundleAnalyzer && new BundleAnalyzerPlugin(),
-      // Generate client manifests file that will be used for server bundle
+      // Generate client manifests file that will be used for server bundle.
       new ReactLoadableSSRAddon({
         filename: clientManifestPath,
       }),
@@ -90,17 +90,19 @@ export async function build(
     });
   }
 
-  // Plugin lifecycle - configureWebpack
+  // Plugin Lifecycle - configureWebpack.
   plugins.forEach(plugin => {
     const {configureWebpack} = plugin;
     if (!configureWebpack) {
       return;
     }
+
     clientConfig = applyConfigureWebpack(
       configureWebpack.bind(plugin), // The plugin lifecycle may reference `this`.
       clientConfig,
       false,
     );
+
     serverConfig = applyConfigureWebpack(
       configureWebpack.bind(plugin), // The plugin lifecycle may reference `this`.
       serverConfig,
@@ -108,7 +110,8 @@ export async function build(
     );
   });
 
-  // Make sure generated client-manifest is cleaned first so we don't reuse the one from prevs build
+  // Make sure generated client-manifest is cleaned first so we don't reuse
+  // the one from previous builds.
   if (fs.existsSync(clientManifestPath)) {
     fs.unlinkSync(clientManifestPath);
   }
@@ -116,7 +119,7 @@ export async function build(
   // Run webpack to build JS bundle (client) and static html files (server).
   await compile([clientConfig, serverConfig]);
 
-  // Remove server.bundle.js because it is useless
+  // Remove server.bundle.js because it is not needed.
   if (
     serverConfig.output &&
     serverConfig.output.filename &&
@@ -128,7 +131,7 @@ export async function build(
     });
   }
 
-  /* Plugin lifecycle - postBuild */
+  // Plugin Lifecycle - postBuild.
   await Promise.all(
     plugins.map(async plugin => {
       if (!plugin.postBuild) {

@@ -27,7 +27,7 @@ export async function deploy(siteDir: string): Promise<void> {
     throw new Error(`Please set the GIT_USER`);
   }
 
-  // The branch that contains the latest docs changes that will be deployed
+  // The branch that contains the latest docs changes that will be deployed.
   const currentBranch =
     process.env.CURRENT_BRANCH ||
     shell.exec('git rev-parse --abbrev-ref HEAD').stdout.trim();
@@ -52,7 +52,7 @@ export async function deploy(siteDir: string): Promise<void> {
     );
   }
 
-  // We never deploy on pull request
+  // We never deploy on pull request.
   const isPullRequest =
     process.env.CI_PULL_REQUEST || process.env.CIRCLE_PULL_REQUEST;
   if (isPullRequest) {
@@ -72,7 +72,7 @@ export async function deploy(siteDir: string): Promise<void> {
       ? `git@${githubHost}:${organizationName}/${projectName}.git`
       : `https://${gitUser}@${githubHost}/${organizationName}/${projectName}.git`;
 
-  // Check if this is a cross-repo publish
+  // Check if this is a cross-repo publish.
   const currentRepoUrl = shell
     .exec('git config --get remote.origin.url')
     .stdout.trim();
@@ -80,21 +80,22 @@ export async function deploy(siteDir: string): Promise<void> {
     `${organizationName}/${projectName}.git`,
   );
 
-  // We don't allow deploying to the same branch unless it's a cross publish
+  // We don't allow deploying to the same branch unless it's a cross publish.
   if (currentBranch === deploymentBranch && !crossRepoPublish) {
     throw new Error(
       `Cannot deploy from a ${deploymentBranch} branch. Only to it`,
     );
   }
 
-  // Save the commit hash that triggers publish-gh-pages before checking out to deployment branch
+  // Save the commit hash that triggers publish-gh-pages before checking
+  // out to deployment branch.
   const currentCommit = shell.exec('git rev-parse HEAD').stdout.trim();
 
-  // Clear docusaurus 2 cache dir for deploy consistency
+  // Clear Docusaurus 2 cache dir for deploy consistency.
   const tempDir = path.join(siteDir, '.docusaurus');
   fs.removeSync(tempDir);
 
-  // build static html files, then push to deploymentBranch branch of specified repo
+  // Build static html files, then push to deploymentBranch branch of specified repo.
   build(siteDir)
     .then(() => {
       shell.cd(tempDir);
@@ -109,8 +110,9 @@ export async function deploy(siteDir: string): Promise<void> {
 
       shell.cd(`${projectName}-${deploymentBranch}`);
 
-      // If the default branch is the one we're deploying to, then we'll fail to create it.
-      // This is the case of a cross-repo publish, where we clone a github.io repo with a default master branch.
+      // If the default branch is the one we're deploying to, then we'll fail
+      // to create it. This is the case of a cross-repo publish, where we clone
+      // a github.io repo with a default master branch.
       const defaultBranch = shell
         .exec('git rev-parse --abbrev-ref HEAD')
         .stdout.trim();
@@ -164,8 +166,10 @@ export async function deploy(siteDir: string): Promise<void> {
         // The commit might return a non-zero value when site is up to date.
         const websiteURL =
           githubHost === 'github.com'
-            ? `https://${organizationName}.github.io/${projectName}` // gh-pages hosted repo
-            : `https://${githubHost}/pages/${organizationName}/${projectName}`; // GitHub enterprise hosting.
+            ? // gh-pages hosted repo
+              `https://${organizationName}.github.io/${projectName}`
+            : // GitHub enterprise hosting.
+              `https://${githubHost}/pages/${organizationName}/${projectName}`;
         shell.echo(`Website is live at: ${websiteURL}`);
         shell.exit(0);
       }

@@ -14,12 +14,14 @@ export function externalCommand(cli: CommanderStatic, siteDir: string): void {
   const pluginConfigs = loadPluginConfigs(context);
   const plugins = initPlugins({pluginConfigs, context});
 
-  // Plugin lifecycle - extendCli
+  // Plugin Lifecycle - extendCli.
   plugins.forEach(plugin => {
     const {extendCli} = plugin;
+
     if (!extendCli) {
       return;
     }
+
     extendCli(cli);
   });
 }

@@ -92,7 +92,7 @@ export async function start(
         __dirname,
         '../client/templates/index.html.template.ejs',
       ),
-      // so we can define the position where the scripts are injected
+      // So we can define the position where the scripts are injected.
       inject: false,
       filename: 'index.html',
       title: siteConfig.title,
@@ -100,12 +100,12 @@ export async function start(
       preBodyTags,
       postBodyTags,
     }),
-    // This is necessary to emit hot updates for webpack-dev-server
+    // This is necessary to emit hot updates for webpack-dev-server.
     new HotModuleReplacementPlugin(),
   ],
  });
 
-  // Plugin lifecycle - configureWebpack
+  // Plugin Lifecycle - configureWebpack.
   plugins.forEach(plugin => {
     const {configureWebpack} = plugin;
     if (!configureWebpack) {
@@ -137,13 +137,13 @@ export async function start(
      rewrites: [{from: /\/*/, to: baseUrl}],
    },
    disableHostCheck: true,
-    // Disable overlay on browser since we use CRA's overlay error reporting
+    // Disable overlay on browser since we use CRA's overlay error reporting.
    overlay: false,
    host,
    before: (app, server) => {
      app.use(baseUrl, express.static(path.resolve(siteDir, STATIC_DIR_NAME)));
 
-      // This lets us fetch source contents from webpack for the error overlay
+      // This lets us fetch source contents from webpack for the error overlay.
      app.use(evalSourceMapMiddleware(server));
      // This lets us open files from the runtime error overlay.
      app.use(errorOverlayMiddleware());

@@ -29,7 +29,7 @@ export async function swizzle(
     fromPath = path.join(fromPath, componentName);
     toPath = path.join(toPath, componentName);
 
-    // Handle single js file only.
+    // Handle single JavaScript file only.
     // E.g: if <fromPath> does not exist, we try to swizzle <fromPath>.js instead
     if (!fs.existsSync(fromPath) && fs.existsSync(`${fromPath}.js`)) {
       [fromPath, toPath] = [`${fromPath}.js`, `${toPath}.js`];

@@ -53,10 +53,12 @@ export function loadConfig(siteDir: string): DocusaurusConfig {
   if (!fs.existsSync(configPath)) {
     throw new Error(`${CONFIG_FILE_NAME} not found`);
   }
+
   const loadedConfig = importFresh(configPath) as Partial<DocusaurusConfig>;
   const missingFields = REQUIRED_FIELDS.filter(
     field => !_.has(loadedConfig, field),
   );
+
   if (missingFields.length > 0) {
     throw new Error(
       `The required field(s) ${formatFields(
@@ -76,6 +78,7 @@ export function loadConfig(siteDir: string): DocusaurusConfig {
   const unrecognizedFields = Object.keys(config).filter(
     field => !allowedFields.includes(field),
   );
+
   if (unrecognizedFields && unrecognizedFields.length > 0) {
     throw new Error(
       `The field(s) ${formatFields(

@@ -44,6 +44,7 @@ export function loadHtmlTags(plugins: Plugin<any>[]): InjectedHtmlTags {
     },
     {headTags: '', preBodyTags: '', postBodyTags: ''},
   );
+
   return {
     headTags: htmlTags.headTags.trim(),
     preBodyTags: htmlTags.preBodyTags.trim(),

@@ -52,14 +52,14 @@ export function loadPluginConfigs(context: LoadContext): PluginConfig[] {
   return [
     ...presetPlugins,
     ...presetThemes,
-    // Site config should the highest priority.
+    // Site config should be the highest priority.
     ...(siteConfig.plugins || []),
     ...(siteConfig.themes || []),
   ];
 }
 
 export async function load(siteDir: string): Promise<Props> {
-  // Context
+  // Context.
   const context: LoadContext = loadContext(siteDir);
   const {generatedFilesDir, siteConfig, outDir, baseUrl} = context;
   const genSiteConfig = generate(
@@ -68,7 +68,7 @@ export async function load(siteDir: string): Promise<Props> {
     `export default ${JSON.stringify(siteConfig, null, 2)};`,
   );
 
-  // Plugins
+  // Plugins.
   const pluginConfigs: PluginConfig[] = loadPluginConfigs(context);
   const {plugins, pluginsRouteConfigs} = await loadPlugins({
     pluginConfigs,
@@ -83,7 +83,9 @@ export async function load(siteDir: string): Promise<Props> {
   const userTheme = path.resolve(siteDir, THEME_PATH);
   const alias = loadThemeAlias([fallbackTheme, ...pluginThemes, userTheme]);
 
-  // Make a fake plugin to resolve aliased theme components && inject scripts/stylesheets
+  // Make a fake plugin to:
+  // - Resolve aliased theme components
+  // - Inject scripts/stylesheets
   const {stylesheets = [], scripts = []} = siteConfig;
   plugins.push({
     name: 'docusaurus-bootstrap-plugin',
@@ -134,10 +136,10 @@ export async function load(siteDir: string): Promise<Props> {
       .join('\n')}\n];\n`,
   );
 
-  // Load extra head & body html tags
+  // Load extra head & body html tags.
   const {headTags, preBodyTags, postBodyTags} = loadHtmlTags(plugins);
 
-  // Routing
+  // Routing.
   const {
     registry,
     routesChunkNames,

@@ -9,7 +9,7 @@ import path from 'path';
 import {load} from './index';
 import {Props} from '@docusaurus/types';
 
-// Helper methods to setup dummy/fake projects
+// Helper methods to setup dummy/fake projects.
 export const loadSetup = async (name: string): Promise<Props> => {
   const fixtures = path.join(__dirname, '__tests__', '__fixtures__');
   const simpleSite = path.join(fixtures, 'simple-site');

@@ -18,7 +18,8 @@ import {
 import {initPlugins} from './init';
 
 export function sortConfig(routeConfigs: RouteConfig[]) {
-  // Sort the route config. This ensures that route with nested routes is always placed last
+  // Sort the route config. This ensures that route with nested
+  // routes is always placed last.
   routeConfigs.sort((a, b) => {
     if (a.routes && !b.routes) {
       return 1;
@@ -26,15 +27,17 @@ export function sortConfig(routeConfigs: RouteConfig[]) {
     if (!a.routes && b.routes) {
       return -1;
     }
-    // Higher priority get placed first
+
+    // Higher priority get placed first.
     if (a.priority || b.priority) {
       const priorityA = a.priority || 0;
       const priorityB = b.priority || 0;
       const score = priorityA > priorityB ? -1 : priorityB > priorityA ? 1 : 0;
 
       if (score !== 0) {
         return score;
       }
     }
+
     return a.path > b.path ? 1 : b.path > a.path ? -1 : 0;
   });
 }
@@ -49,24 +52,24 @@ export async function loadPlugins({
   plugins: Plugin<any>[];
   pluginsRouteConfigs: RouteConfig[];
 }> {
-  // 1. Plugin Lifecycle - Initialization/Constructor
+  // 1. Plugin Lifecycle - Initialization/Constructor.
   const plugins: Plugin<any>[] = initPlugins({pluginConfigs, context});
 
-  // 2. Plugin lifecycle - loadContent
-  // Currently plugins run lifecycle in parallel and are not order-dependent. We could change
-  // this in future if there are plugins which need to run in certain order or depend on
-  // others for data.
+  // 2. Plugin Lifecycle - loadContent.
+  // Currently plugins run lifecycle methods in parallel and are not order-dependent.
+  // We could change this in future if there are plugins which need to
+  // run in certain order or depend on others for data.
   const pluginsLoadedContent = await Promise.all(
     plugins.map(async plugin => {
       if (!plugin.loadContent) {
         return null;
       }
-      const content = await plugin.loadContent();
-      return content;
+
+      return await plugin.loadContent();
     }),
   );
 
-  // 3. Plugin lifecycle - contentLoaded
+  // 3. Plugin Lifecycle - contentLoaded.
   const pluginsRouteConfigs: RouteConfig[] = [];
 
   await Promise.all(
@@ -97,7 +100,8 @@ export async function loadPlugins({
     }),
   );
 
-  // Sort the route config. This ensures that route with nested routes is always placed last
+  // Sort the route config. This ensures that route with nested
+  // routes are always placed last.
   sortConfig(pluginsRouteConfigs);
 
   return {

@@ -20,6 +20,7 @@ export function initPlugins({
     pluginConfigs.map(pluginItem => {
       let pluginModuleImport;
       let pluginOptions = {};
+
       if (!pluginItem) {
         return null;
       }
@@ -35,7 +36,8 @@ export function initPlugins({
         return null;
       }
 
-      // module is any valid module identifier - npm package or locally-resolved path.
+      // The pluginModuleImport value is any valid
+      // module identifier - npm package or locally-resolved path.
       const pluginModule: any = importFresh(pluginModuleImport);
       return (pluginModule.default || pluginModule)(context, pluginOptions);
     }),

@@ -54,7 +54,7 @@ export async function loadRoutes(
     [routePath: string]: ChunkNames;
   } = {};
 
-  // This is the higher level overview of route code generation
+  // This is the higher level overview of route code generation.
   function generateRouteCode(routeConfig: RouteConfig): string {
     const {
       path: routePath,
@@ -94,7 +94,7 @@ export async function loadRoutes(
     if (isModule(value)) {
       const modulePath = getModulePath(value);
       const chunkName = genChunkName(modulePath, prefix, name);
-      // We need to JSON.stringify so that if its on windows, backslash are escaped.
+      // We need to JSON.stringify so that if its on windows, backslashes are escaped.
      const loader = `() => import(/* webpackChunkName: '${chunkName}' */ ${JSON.stringify(
        modulePath,
      )})`;