chore(website): split changelog per version + adjust changelog plugin implementation (#11287)

Author: Sébastien Lorber, 2025-06-24 17:05:58 +02:00 (committed by GitHub)
parent e14caf1f78
commit e82cd48842
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 6921 additions and 6916 deletions


@@ -21,6 +21,7 @@
   ],
   "ignorePaths": [
     "CHANGELOG.md",
+    "CHANGELOG-v*.md",
     "patches",
     "packages/docusaurus-theme-translations/locales",
     "packages/docusaurus-plugin-ideal-image/src/theme/IdealImageLegacy",

CHANGELOG-v2.md (new file, 6757 additions): file diff suppressed because it is too large.

A second file's diff is also suppressed because it is too large.


@@ -8,131 +8,42 @@
 import path from 'path';
 import fs from 'fs-extra';
 import pluginContentBlog from '@docusaurus/plugin-content-blog';
-import {aliasedSitePath, docuHash, normalizeUrl} from '@docusaurus/utils';
+import {
+  aliasedSitePath,
+  docuHash,
+  normalizeUrl,
+  safeGlobby,
+} from '@docusaurus/utils';
+import {createBlogFiles, toChangelogEntries} from './utils';
 
 export {validateOptions} from '@docusaurus/plugin-content-blog';
 
-/**
- * Multiple versions may be published on the same day, causing the order to be
- * the reverse. Therefore, our publish time has a "fake hour" to order them.
- */
-// TODO may leak small amount of memory in multi-locale builds
-const publishTimes = new Set<string>();
-
-type Author = {name: string; url: string; alias: string; imageURL: string};
-type AuthorsMap = Record<string, Author>;
-
-type ChangelogEntry = {
-  title: string;
-  content: string;
-  authors: Author[];
-};
-
-function parseAuthor(committerLine: string): Author {
-  const groups = committerLine.match(
-    /- (?:(?<name>.*?) \()?\[@(?<alias>.*)\]\((?<url>.*?)\)\)?/,
-  )!.groups as {name: string; alias: string; url: string};
-
-  return {
-    ...groups,
-    name: groups.name ?? groups.alias,
-    imageURL: `https://github.com/${groups.alias}.png`,
-  };
-}
-
-function parseAuthors(content: string): Author[] {
-  const committersContent = content.match(/## Committers: \d.*/s)?.[0];
-  if (!committersContent) {
-    return [];
-  }
-  const committersLines = committersContent.match(/- .*/g)!;
-  const authors = committersLines
-    .map(parseAuthor)
-    .sort((a, b) => a.url.localeCompare(b.url));
-
-  return authors;
-}
-
-function createAuthorsMap(changelogEntries: ChangelogEntry[]): AuthorsMap {
-  const allAuthors = changelogEntries.flatMap((entry) => entry.authors);
-  const authorsMap: AuthorsMap = {};
-  allAuthors?.forEach((author) => {
-    authorsMap[author.alias] = author;
-  });
-  return authorsMap;
-}
-
-function toChangelogEntry(sectionContent: string): ChangelogEntry | null {
-  const title = sectionContent
-    .match(/\n## .*/)?.[0]
-    .trim()
-    .replace('## ', '');
-  if (!title) {
-    return null;
-  }
-  const content = sectionContent
-    .replace(/\n## .*/, '')
-    .trim()
-    .replace('running_woman', 'running');
-
-  const authors = parseAuthors(content);
-
-  let hour = 20;
-  const date = title.match(/ \((?<date>.*)\)/)?.groups!.date;
-  while (publishTimes.has(`${date}T${hour}:00`)) {
-    hour -= 1;
-  }
-  publishTimes.add(`${date}T${hour}:00`);
-
-  return {
-    authors,
-    title: title.replace(/ \(.*\)/, ''),
-    content: `---
-mdx:
-  format: md
-date: ${`${date}T${hour}:00`}${
-      authors.length > 0
-        ? `
-authors:
-${authors.map((author) => ` - '${author.alias}'`).join('\n')}`
-        : ''
-    }
----
-
-# ${title.replace(/ \(.*\)/, '')}
-
-<!-- truncate -->
-
-${content.replace(/####/g, '##')}`,
-  };
-}
-
-function toChangelogEntries(fileContent: string): ChangelogEntry[] {
-  return fileContent
-    .split(/(?=\n## )/)
-    .map(toChangelogEntry)
-    .filter((s): s is ChangelogEntry => s !== null);
-}
-
-async function createBlogFiles(
-  generateDir: string,
-  changelogEntries: ChangelogEntry[],
-) {
-  await Promise.all(
-    changelogEntries.map((changelogEntry) =>
-      fs.outputFile(
-        path.join(generateDir, `${changelogEntry.title}.md`),
-        changelogEntry.content,
-      ),
-    ),
-  );
-  await fs.outputFile(
-    path.join(generateDir, 'authors.json'),
-    JSON.stringify(createAuthorsMap(changelogEntries), null, 2),
-  );
-}
+const MonorepoRoot = path.resolve(path.join(__dirname, '../../../..'));
+
+const ChangelogFilePattern = 'CHANGELOG(-v[0-9]*)?.md';
+
+async function getChangelogFiles() {
+  const files = await safeGlobby([ChangelogFilePattern], {
+    cwd: MonorepoRoot,
+  });
+  // As of today, there are 2 changelog files
+  // and this is only going to increase
+  if (files.length < 2) {
+    throw new Error(
+      "Looks like the changelog plugin didn't detect Docusaurus changelog files",
+    );
+  }
+  // Note: the returned file order doesn't matter.
+  return files;
+}
+
+function readChangelogFile(filename: string) {
+  return fs.readFile(path.join(MonorepoRoot, filename), 'utf-8');
+}
+
+async function loadChangelogEntries(changelogFiles: string[]) {
+  const filesContent = await Promise.all(changelogFiles.map(readChangelogFile));
+  return toChangelogEntries(filesContent);
+}
 
 const ChangelogPlugin: typeof pluginContentBlog =
@@ -145,14 +56,14 @@ const ChangelogPlugin: typeof pluginContentBlog =
       blogListComponent: '@theme/ChangelogList',
       blogPostComponent: '@theme/ChangelogPage',
     });
-    const changelogPath = path.join(__dirname, '../../../../CHANGELOG.md');
+    const changelogFiles = await getChangelogFiles();
     return {
      ...blogPlugin,
      name: 'changelog-plugin',
      async loadContent() {
-        const fileContent = await fs.readFile(changelogPath, 'utf-8');
-        const changelogEntries = toChangelogEntries(fileContent);
+        const changelogEntries = await loadChangelogEntries(changelogFiles);
 
        // We have to create intermediate files here
        // Unfortunately Docusaurus doesn't have yet any concept of virtual file
@@ -199,8 +110,7 @@ const ChangelogPlugin: typeof pluginContentBlog =
      },
      getPathsToWatch() {
-        // Don't watch the generated dir
-        return [changelogPath];
+        return [path.join(MonorepoRoot, ChangelogFilePattern)];
      },
    };
  };
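
Aside (not part of the commit): the new ChangelogFilePattern glob is meant to match both the existing CHANGELOG.md and any split-off CHANGELOG-vN.md files at the monorepo root. The sketch below approximates that glob with a plain RegExp purely for illustration; the actual matching goes through safeGlobby, and CHANGELOG-v3.md is a hypothetical future file name.

// Sketch only: approximate the 'CHANGELOG(-v[0-9]*)?.md' glob with a RegExp.
// The real plugin matches files via safeGlobby; this regex is an assumption
// about the intended semantics, not the library's implementation.
const changelogFileRegExp = /^CHANGELOG(-v[0-9].*)?\.md$/;

const candidates = [
  'CHANGELOG.md', // current changelog, matched
  'CHANGELOG-v2.md', // split-off file added by this commit, matched
  'CHANGELOG-v3.md', // hypothetical future split, matched
  'README.md', // not matched
];

console.log(candidates.filter((file) => changelogFileRegExp.test(file)));
// Expected: ['CHANGELOG.md', 'CHANGELOG-v2.md', 'CHANGELOG-v3.md']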


@@ -0,0 +1,134 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import fs from 'fs-extra';
import path from 'path';

/**
 * Multiple versions may be published on the same day, causing the order to be
 * the reverse. Therefore, our publish time has a "fake hour" to order them.
 */
// TODO may leak small amount of memory in multi-locale builds
const publishTimes = new Set<string>();

type Author = {name: string; url: string; alias: string; imageURL: string};
type AuthorsMap = Record<string, Author>;

type ChangelogEntry = {
  title: string;
  content: string;
  authors: Author[];
};

function parseAuthor(committerLine: string): Author {
  const groups = committerLine.match(
    /- (?:(?<name>.*?) \()?\[@(?<alias>.*)\]\((?<url>.*?)\)\)?/,
  )!.groups as {name: string; alias: string; url: string};

  return {
    ...groups,
    name: groups.name ?? groups.alias,
    imageURL: `https://github.com/${groups.alias}.png`,
  };
}

function parseAuthors(content: string): Author[] {
  const committersContent = content.match(/## Committers: \d.*/s)?.[0];
  if (!committersContent) {
    return [];
  }
  const committersLines = committersContent.match(/- .*/g)!;
  const authors = committersLines
    .map(parseAuthor)
    .sort((a, b) => a.url.localeCompare(b.url));

  return authors;
}

export function createAuthorsMap(
  changelogEntries: ChangelogEntry[],
): AuthorsMap {
  const allAuthors = changelogEntries.flatMap((entry) => entry.authors);
  const authorsMap: AuthorsMap = {};
  allAuthors?.forEach((author) => {
    authorsMap[author.alias] = author;
  });
  return authorsMap;
}

function toChangelogEntry(sectionContent: string): ChangelogEntry | null {
  const title = sectionContent
    .match(/\n## .*/)?.[0]
    .trim()
    .replace('## ', '');
  if (!title) {
    return null;
  }
  const content = sectionContent
    .replace(/\n## .*/, '')
    .trim()
    .replace('running_woman', 'running');

  const authors = parseAuthors(content);

  let hour = 20;
  const date = title.match(/ \((?<date>.*)\)/)?.groups!.date;
  while (publishTimes.has(`${date}T${hour}:00`)) {
    hour -= 1;
  }
  publishTimes.add(`${date}T${hour}:00`);

  return {
    authors,
    title: title.replace(/ \(.*\)/, ''),
    content: `---
mdx:
  format: md
date: ${`${date}T${hour}:00`}${
      authors.length > 0
        ? `
authors:
${authors.map((author) => ` - '${author.alias}'`).join('\n')}`
        : ''
    }
---

# ${title.replace(/ \(.*\)/, '')}

<!-- truncate -->

${content.replace(/####/g, '##')}`,
  };
}

export function toChangelogEntries(filesContent: string[]): ChangelogEntry[] {
  return filesContent
    .flatMap((content) => content.split(/(?=\n## )/))
    .map(toChangelogEntry)
    .filter((s): s is ChangelogEntry => s !== null);
}

export async function createBlogFiles(
  generateDir: string,
  changelogEntries: ChangelogEntry[],
): Promise<void> {
  await Promise.all(
    changelogEntries.map((changelogEntry) =>
      fs.outputFile(
        path.join(generateDir, `${changelogEntry.title}.md`),
        changelogEntry.content,
      ),
    ),
  );
  await fs.outputFile(
    path.join(generateDir, 'authors.json'),
    JSON.stringify(createAuthorsMap(changelogEntries), null, 2),
  );
}
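
For context on how the plugin consumes this module, here is a minimal usage sketch (not part of the commit). The changelog excerpt, the /tmp output directory, and the demo function are made up for illustration; the import assumes this file is the './utils' module referenced by the plugin above.

import {createBlogFiles, toChangelogEntries} from './utils';

async function demo(): Promise<void> {
  // Hypothetical excerpt mimicking the changelog format parsed above.
  const fakeChangelogFile = `# Docusaurus Changelog

## 3.8.1 (2025-05-26)

#### :bug: Bug Fix

- \`docusaurus\`: fix something small

#### Committers: 1

- Sébastien Lorber ([@slorber](https://github.com/slorber))
`;

  // toChangelogEntries() takes the contents of one or more changelog files
  // and splits each into per-release entries with generated front matter.
  // Releases published on the same day get decreasing fake hours (20:00,
  // 19:00, ...) via publishTimes so their order is preserved.
  const entries = toChangelogEntries([fakeChangelogFile]);
  console.log(entries.map((entry) => entry.title)); // ['3.8.1']

  // createBlogFiles() writes one markdown file per release plus authors.json
  // into the (hypothetical) generated directory.
  await createBlogFiles('/tmp/changelog-demo', entries);
}

demo().catch(console.error);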