Mirror of https://github.com/facebook/docusaurus.git (synced 2025-07-21 02:27:49 +02:00)
chore(website): split changelog per version + adjust changelog plugin implementation (#11287)

Parent: e14caf1f78
Commit: e82cd48842

5 changed files with 6921 additions and 6916 deletions
@@ -21,6 +21,7 @@
   ],
   "ignorePaths": [
     "CHANGELOG.md",
+    "CHANGELOG-v*.md",
     "patches",
     "packages/docusaurus-theme-translations/locales",
     "packages/docusaurus-plugin-ideal-image/src/theme/IdealImageLegacy",
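The only change here is the new "CHANGELOG-v*.md" ignore entry, which keeps the spell checker away from the archived per-version changelogs this commit introduces. A rough illustration of which file names the glob is meant to cover, using a plain RegExp as a stand-in (the pattern below is an approximation for illustration, not part of the change, and cspell's actual glob semantics may differ slightly):

// Approximation of the "CHANGELOG-v*.md" ignore glob, for illustration only.
const ignoredChangelogArchive = /^CHANGELOG-v.*\.md$/;

ignoredChangelogArchive.test('CHANGELOG-v2.md'); // true: the new archive file
ignoredChangelogArchive.test('CHANGELOG-v3.md'); // true: future archives
ignoredChangelogArchive.test('CHANGELOG.md');    // false: already listed explicitly above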
CHANGELOG-v2.md: 6757 changed lines (new file)

File diff suppressed because it is too large.
CHANGELOG.md: 6797 changed lines

File diff suppressed because it is too large.
website/src/plugins/changelog/index.ts

@@ -8,131 +8,42 @@
 import path from 'path';
 import fs from 'fs-extra';
 import pluginContentBlog from '@docusaurus/plugin-content-blog';
-import {aliasedSitePath, docuHash, normalizeUrl} from '@docusaurus/utils';
+import {
+  aliasedSitePath,
+  docuHash,
+  normalizeUrl,
+  safeGlobby,
+} from '@docusaurus/utils';
+import {createBlogFiles, toChangelogEntries} from './utils';

 export {validateOptions} from '@docusaurus/plugin-content-blog';

-/**
- * Multiple versions may be published on the same day, causing the order to be
- * the reverse. Therefore, our publish time has a "fake hour" to order them.
- */
-// TODO may leak small amount of memory in multi-locale builds
-const publishTimes = new Set<string>();
+const MonorepoRoot = path.resolve(path.join(__dirname, '../../../..'));

-type Author = {name: string; url: string; alias: string; imageURL: string};
+const ChangelogFilePattern = 'CHANGELOG(-v[0-9]*)?.md';

-type AuthorsMap = Record<string, Author>;
-
-type ChangelogEntry = {
-  title: string;
-  content: string;
-  authors: Author[];
-};
-
-function parseAuthor(committerLine: string): Author {
-  const groups = committerLine.match(
-    /- (?:(?<name>.*?) \()?\[@(?<alias>.*)\]\((?<url>.*?)\)\)?/,
-  )!.groups as {name: string; alias: string; url: string};
-
-  return {
-    ...groups,
-    name: groups.name ?? groups.alias,
-    imageURL: `https://github.com/${groups.alias}.png`,
-  };
-}
-
-function parseAuthors(content: string): Author[] {
-  const committersContent = content.match(/## Committers: \d.*/s)?.[0];
-  if (!committersContent) {
-    return [];
-  }
-  const committersLines = committersContent.match(/- .*/g)!;
-
-  const authors = committersLines
-    .map(parseAuthor)
-    .sort((a, b) => a.url.localeCompare(b.url));
-
-  return authors;
-}
-
-function createAuthorsMap(changelogEntries: ChangelogEntry[]): AuthorsMap {
-  const allAuthors = changelogEntries.flatMap((entry) => entry.authors);
-  const authorsMap: AuthorsMap = {};
-  allAuthors?.forEach((author) => {
-    authorsMap[author.alias] = author;
+async function getChangelogFiles() {
+  const files = await safeGlobby([ChangelogFilePattern], {
+    cwd: MonorepoRoot,
   });
-  return authorsMap;
-}
-
-function toChangelogEntry(sectionContent: string): ChangelogEntry | null {
-  const title = sectionContent
-    .match(/\n## .*/)?.[0]
-    .trim()
-    .replace('## ', '');
-  if (!title) {
-    return null;
+  // As of today, there are 2 changelog files
+  // and this is only going to increase
+  if (files.length < 2) {
+    throw new Error(
+      "Looks like the changelog plugin didn't detect Docusaurus changelog files",
+    );
   }
-  const content = sectionContent
-    .replace(/\n## .*/, '')
-    .trim()
-    .replace('running_woman', 'running');
-
-  const authors = parseAuthors(content);
-
-  let hour = 20;
-  const date = title.match(/ \((?<date>.*)\)/)?.groups!.date;
-  while (publishTimes.has(`${date}T${hour}:00`)) {
-    hour -= 1;
-  }
-  publishTimes.add(`${date}T${hour}:00`);
-
-  return {
-    authors,
-    title: title.replace(/ \(.*\)/, ''),
-    content: `---
-mdx:
-  format: md
-date: ${`${date}T${hour}:00`}${
-      authors.length > 0
-        ? `
-authors:
-${authors.map((author) => ` - '${author.alias}'`).join('\n')}`
-        : ''
-    }
----
-
-# ${title.replace(/ \(.*\)/, '')}
-
-<!-- truncate -->
-
-${content.replace(/####/g, '##')}`,
-  };
+  // Note: the returned file order doesn't matter.
+  return files;
 }

-function toChangelogEntries(fileContent: string): ChangelogEntry[] {
-  return fileContent
-    .split(/(?=\n## )/)
-    .map(toChangelogEntry)
-    .filter((s): s is ChangelogEntry => s !== null);
+function readChangelogFile(filename: string) {
+  return fs.readFile(path.join(MonorepoRoot, filename), 'utf-8');
 }

-async function createBlogFiles(
-  generateDir: string,
-  changelogEntries: ChangelogEntry[],
-) {
-  await Promise.all(
-    changelogEntries.map((changelogEntry) =>
-      fs.outputFile(
-        path.join(generateDir, `${changelogEntry.title}.md`),
-        changelogEntry.content,
-      ),
-    ),
-  );
-
-  await fs.outputFile(
-    path.join(generateDir, 'authors.json'),
-    JSON.stringify(createAuthorsMap(changelogEntries), null, 2),
-  );
+async function loadChangelogEntries(changelogFiles: string[]) {
+  const filesContent = await Promise.all(changelogFiles.map(readChangelogFile));
+  return toChangelogEntries(filesContent);
 }

 const ChangelogPlugin: typeof pluginContentBlog =
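The additions above replace the old single-file pipeline with three small helpers. A minimal sketch of how they are meant to compose, assuming the two changelog files currently present at the monorepo root (the wrapper function and the logged output are illustrative only, not part of the diff):

// Illustrative composition of the new helpers; not part of the diff.
async function illustrateChangelogDiscovery() {
  // safeGlobby + ChangelogFilePattern discover e.g. ['CHANGELOG.md', 'CHANGELOG-v2.md'].
  const changelogFiles = await getChangelogFiles();

  // Each file is read relative to MonorepoRoot and split into entries.
  const changelogEntries = await loadChangelogEntries(changelogFiles);

  console.log(`${changelogEntries.length} entries from ${changelogFiles.length} changelog files`);
}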
@@ -145,14 +56,14 @@ const ChangelogPlugin: typeof pluginContentBlog =
     blogListComponent: '@theme/ChangelogList',
     blogPostComponent: '@theme/ChangelogPage',
   });
-  const changelogPath = path.join(__dirname, '../../../../CHANGELOG.md');
+  const changelogFiles = await getChangelogFiles();

   return {
     ...blogPlugin,
     name: 'changelog-plugin',

     async loadContent() {
-      const fileContent = await fs.readFile(changelogPath, 'utf-8');
-      const changelogEntries = toChangelogEntries(fileContent);
+      const changelogEntries = await loadChangelogEntries(changelogFiles);

       // We have to create intermediate files here
       // Unfortunately Docusaurus doesn't have yet any concept of virtual file
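This hunk shows the wrapper-plugin pattern the changelog plugin relies on: it instantiates the regular blog plugin with custom theme components, then overrides selected lifecycle methods. A condensed sketch of that shape follows; the delegation back to blogPlugin.loadContent() is an assumption about what the omitted part of loadContent does, since it is not visible in this hunk:

// Condensed sketch of the wrapping pattern; details omitted or assumed.
const ChangelogPluginSketch: typeof pluginContentBlog = async (context, options) => {
  const blogPlugin = await pluginContentBlog(context, {
    ...options,
    blogListComponent: '@theme/ChangelogList',
    blogPostComponent: '@theme/ChangelogPage',
  });
  const changelogFiles = await getChangelogFiles();

  return {
    ...blogPlugin,
    name: 'changelog-plugin',
    async loadContent() {
      const changelogEntries = await loadChangelogEntries(changelogFiles);
      // ...write intermediate blog files from changelogEntries here...
      // Assumption: the real implementation then reuses the wrapped blog plugin.
      return blogPlugin.loadContent?.();
    },
  };
};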
@@ -199,8 +110,7 @@ const ChangelogPlugin: typeof pluginContentBlog =
     },

     getPathsToWatch() {
       // Don't watch the generated dir
-      return [changelogPath];
+      return [path.join(MonorepoRoot, ChangelogFilePattern)];
     },
   };
 };
website/src/plugins/changelog/utils.ts: 134 changed lines (new file)

@@ -0,0 +1,134 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import fs from 'fs-extra';
import path from 'path';

/**
 * Multiple versions may be published on the same day, causing the order to be
 * the reverse. Therefore, our publish time has a "fake hour" to order them.
 */
// TODO may leak small amount of memory in multi-locale builds
const publishTimes = new Set<string>();

type Author = {name: string; url: string; alias: string; imageURL: string};

type AuthorsMap = Record<string, Author>;

type ChangelogEntry = {
  title: string;
  content: string;
  authors: Author[];
};

function parseAuthor(committerLine: string): Author {
  const groups = committerLine.match(
    /- (?:(?<name>.*?) \()?\[@(?<alias>.*)\]\((?<url>.*?)\)\)?/,
  )!.groups as {name: string; alias: string; url: string};

  return {
    ...groups,
    name: groups.name ?? groups.alias,
    imageURL: `https://github.com/${groups.alias}.png`,
  };
}

function parseAuthors(content: string): Author[] {
  const committersContent = content.match(/## Committers: \d.*/s)?.[0];
  if (!committersContent) {
    return [];
  }
  const committersLines = committersContent.match(/- .*/g)!;

  const authors = committersLines
    .map(parseAuthor)
    .sort((a, b) => a.url.localeCompare(b.url));

  return authors;
}

export function createAuthorsMap(
  changelogEntries: ChangelogEntry[],
): AuthorsMap {
  const allAuthors = changelogEntries.flatMap((entry) => entry.authors);
  const authorsMap: AuthorsMap = {};
  allAuthors?.forEach((author) => {
    authorsMap[author.alias] = author;
  });
  return authorsMap;
}
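The parseAuthor regular expression targets the committer lines found in the changelog's Committers sections, which list contributors as markdown links. A worked example of what it extracts; the sample line is illustrative, and parseAuthor is module-private, so this is only a trace of its behavior:

// Sample committer line (illustrative):
const sampleCommitterLine =
  '- Sébastien Lorber ([@slorber](https://github.com/slorber))';

// parseAuthor(sampleCommitterLine) would yield:
// {
//   name: 'Sébastien Lorber',
//   alias: 'slorber',
//   url: 'https://github.com/slorber',
//   imageURL: 'https://github.com/slorber.png',
// }
//
// A line without a display name, e.g. "- [@slorber](https://github.com/slorber)",
// falls back to name === alias.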

function toChangelogEntry(sectionContent: string): ChangelogEntry | null {
  const title = sectionContent
    .match(/\n## .*/)?.[0]
    .trim()
    .replace('## ', '');
  if (!title) {
    return null;
  }
  const content = sectionContent
    .replace(/\n## .*/, '')
    .trim()
    .replace('running_woman', 'running');

  const authors = parseAuthors(content);

  let hour = 20;
  const date = title.match(/ \((?<date>.*)\)/)?.groups!.date;
  while (publishTimes.has(`${date}T${hour}:00`)) {
    hour -= 1;
  }
  publishTimes.add(`${date}T${hour}:00`);

  return {
    authors,
    title: title.replace(/ \(.*\)/, ''),
    content: `---
mdx:
  format: md
date: ${`${date}T${hour}:00`}${
      authors.length > 0
        ? `
authors:
${authors.map((author) => ` - '${author.alias}'`).join('\n')}`
        : ''
    }
---

# ${title.replace(/ \(.*\)/, '')}

<!-- truncate -->

${content.replace(/####/g, '##')}`,
  };
}

export function toChangelogEntries(filesContent: string[]): ChangelogEntry[] {
  return filesContent
    .flatMap((content) => content.split(/(?=\n## )/))
    .map(toChangelogEntry)
    .filter((s): s is ChangelogEntry => s !== null);
}

export async function createBlogFiles(
  generateDir: string,
  changelogEntries: ChangelogEntry[],
): Promise<void> {
  await Promise.all(
    changelogEntries.map((changelogEntry) =>
      fs.outputFile(
        path.join(generateDir, `${changelogEntry.title}.md`),
        changelogEntry.content,
      ),
    ),
  );

  await fs.outputFile(
    path.join(generateDir, 'authors.json'),
    JSON.stringify(createAuthorsMap(changelogEntries), null, 2),
  );
}
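To see the whole pipeline end to end, here is a hedged worked example of toChangelogEntries on two same-day releases. The version numbers, date, content and author below are made up, and the expected values follow from the code above rather than from the diff itself:

import {toChangelogEntries} from './utils';

// Two same-day sections, newest first, as they would appear in a changelog file.
const sampleChangelog = `
## 3.8.1 (2025-05-26)

#### :bug: Bug Fix
- some fix

#### Committers: 1
- Sébastien Lorber ([@slorber](https://github.com/slorber))

## 3.8.0 (2025-05-26)

#### :rocket: New Feature
- some feature

#### Committers: 1
- Sébastien Lorber ([@slorber](https://github.com/slorber))
`;

const entries = toChangelogEntries([sampleChangelog]);
// entries[0].title === '3.8.1' -> front matter gets "date: 2025-05-26T20:00"
// entries[1].title === '3.8.0' -> front matter gets "date: 2025-05-26T19:00"
// The descending "fake hour" keeps 3.8.1 above 3.8.0 in the blog feed. The
// "#### Committers" section becomes the authors list in each entry's front
// matter, and also ends up in authors.json once createBlogFiles runs.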