chore: release Docusaurus v3.1 (#9705)

Co-authored-by: Joshua Chen <sidachen2003@gmail.com>
Co-authored-by: sebastienlorber <lorber.sebastien@gmail.com>
Co-authored-by: Sébastien Lorber <slorber@users.noreply.github.com>
Co-authored-by: Ivan Mar (sOkam!) <7308253+heysokam@users.noreply.github.com>
Co-authored-by: c0h1b4 <dwidman@gmail.com>
Co-authored-by: Janessa Garrow <janessa.garrow@gmail.com>
Co-authored-by: ozaki <29860391+OzakIOne@users.noreply.github.com>
Co-authored-by: axmmisaka <6500159+axmmisaka@users.noreply.github.com>
Co-authored-by: Tatsunori Uchino <tats.u@live.jp>
Co-authored-by: Simen Bekkhus <sbekkhus91@gmail.com>
fix(i18n): complete translations for theme-common.json Brazilian Portuguese (pt-BR) (#9477)
fix(content-blog): add baseUrl for author.image_url (#9581)
fix(type-aliases): add `title` prop for imported inline SVG React components (#9612)
fix(utils): Markdown link replacement with <> but no spaces (#9617)
fix(live-codeblock): stabilize react-live transformCode callback, fix editor/preview desync (#9631)
fix(cli): output help when no conventional config + no subcommand (#9648)
fix CI job (#9604)
fix Lint Autofix workflow (#9632)
fix(pwa-plugin): upgrade workbox (#9668)
fix(create-docusaurus): fix init template code blocks, and little improvements (#9696)
fix(theme): allow empty code blocks and live playgrounds (#9704)
Sébastien Lorber 2024-01-05 19:46:35 +01:00 committed by GitHub
parent a2e05d2118
commit 7b1b89041f
140 changed files with 3187 additions and 1707 deletions

.github/workflows/lint-autofix.yml (new file, 44 lines)

@ -0,0 +1,44 @@
name: Lint AutoFix
on:
pull_request:
branches:
- main
- docusaurus-v**
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
lint-autofix:
name: Lint AutoFix
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v4
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
ref: ${{ github.head_ref }}
- name: Installation
run: yarn
- name: AutoFix Format
run: yarn format
- name: AutoFix JS
run: yarn lint:js:fix
- name: AutoFix Style
run: yarn lint:style:fix
- name: AutoFix Spelling
run: yarn lint:spelling:fix
- uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: 'refactor: apply lint autofix'


@ -1,8 +1,5 @@
{
"*.{js,jsx,ts,tsx,mjs}": ["eslint --fix"],
"*.css": ["stylelint --allow-empty-input --fix"],
"*": [
"prettier --ignore-unknown --write",
"cspell --no-must-find-files --no-progress"
]
"*": ["prettier --ignore-unknown --write"]
}


@ -24,5 +24,5 @@ website/versioned_sidebars/*.json
examples/
website/static/katex/katex.min.css
website/changelog/_swizzle_theme_tests
website/changelog
website/_dogfooding/_swizzle_theme_tests


@ -1,6 +1,6 @@
{
"name": "new.docusaurus.io",
"version": "3.0.1",
"version": "3.1.0",
"private": true,
"scripts": {
"start": "npx --package netlify-cli netlify dev"


@ -1,6 +1,6 @@
{
"name": "argos",
"version": "3.0.1",
"version": "3.1.0",
"description": "Argos visual diff tests",
"license": "MIT",
"private": true,


@ -1,5 +1,5 @@
{
"version": "3.0.1",
"version": "3.1.0",
"npmClient": "yarn",
"useWorkspaces": true,
"useNx": false,


@ -51,8 +51,11 @@
"lint": "yarn lint:js && yarn lint:style && yarn lint:spelling",
"lint:ci": "yarn lint:js --quiet && yarn lint:style && yarn lint:spelling",
"lint:js": "eslint --cache --report-unused-disable-directives \"**/*.{js,jsx,ts,tsx,mjs}\"",
"lint:spelling": "cspell \"**\" --no-progress",
"lint:js:fix": "yarn lint:js --fix",
"lint:spelling": "cspell \"**\" --no-progress --show-context --show-suggestions",
"lint:spelling:fix": "yarn rimraf project-words.txt && echo \"# Project Words - DO NOT TOUCH - This is updated through CI\" >> project-words.txt && yarn -s lint:spelling --words-only --unique --no-exit-code --no-summary \"**\" | sort --ignore-case >> project-words.txt",
"lint:style": "stylelint \"**/*.css\"",
"lint:style:fix": "yarn lint:style --fix",
"lerna": "lerna",
"test": "jest",
"test:build:website": "./admin/scripts/test-release.sh",
@ -80,7 +83,7 @@
"@typescript-eslint/eslint-plugin": "^5.62.0",
"@typescript-eslint/parser": "^5.62.0",
"cross-env": "^7.0.3",
"cspell": "^6.31.2",
"cspell": "^8.1.0",
"eslint": "^8.45.0",
"eslint-config-airbnb": "^19.0.4",
"eslint-config-prettier": "^8.8.0",


@ -10,6 +10,10 @@ npm init docusaurus
yarn create docusaurus
```
```bash
npx create-docusaurus@latest
```
## Usage
Please see the [installation documentation](https://docusaurus.io/docs/installation).


@ -1,6 +1,6 @@
{
"name": "create-docusaurus",
"version": "3.0.1",
"version": "3.1.0",
"description": "Create Docusaurus apps easily.",
"type": "module",
"repository": {
@ -22,8 +22,8 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/logger": "3.1.0",
"@docusaurus/utils": "3.1.0",
"commander": "^5.1.0",
"fs-extra": "^11.1.1",
"lodash": "^4.17.21",


@ -1,6 +1,6 @@
{
"name": "docusaurus-2-classic-typescript-template",
"version": "3.0.1",
"version": "3.1.0",
"private": true,
"scripts": {
"docusaurus": "docusaurus",
@ -15,8 +15,8 @@
"typecheck": "tsc"
},
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/preset-classic": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/preset-classic": "3.1.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@ -24,9 +24,9 @@
"react-dom": "^18.0.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.0.1",
"@docusaurus/tsconfig": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/module-type-aliases": "3.1.0",
"@docusaurus/tsconfig": "3.1.0",
"@docusaurus/types": "3.1.0",
"typescript": "~5.2.2"
},
"browserslist": {


@ -1,6 +1,6 @@
{
"name": "docusaurus-2-classic-template",
"version": "3.0.1",
"version": "3.1.0",
"private": true,
"scripts": {
"docusaurus": "docusaurus",
@ -14,8 +14,8 @@
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/preset-classic": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/preset-classic": "3.1.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@ -23,8 +23,8 @@
"react-dom": "^18.0.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.0.1",
"@docusaurus/types": "3.0.1"
"@docusaurus/module-type-aliases": "3.1.0",
"@docusaurus/types": "3.1.0"
},
"browserslist": {
"production": [


@ -61,13 +61,13 @@ You can reference images relative to the current file as well. This is particula
Markdown code blocks are supported with Syntax highlighting.
````md
```jsx title="src/components/HelloDocusaurus.js"
function HelloDocusaurus() {
return (
<h1>Hello, Docusaurus!</h1>
)
return <h1>Hello, Docusaurus!</h1>;
}
```
````
```jsx title="src/components/HelloDocusaurus.js"
function HelloDocusaurus() {
@ -79,6 +79,7 @@ function HelloDocusaurus() {
Docusaurus has a special syntax to create admonitions and callouts:
```md
:::tip My tip
Use this awesome feature option
@ -90,6 +91,7 @@ Docusaurus has a special syntax to create admonitions and callouts:
This action is dangerous
:::
```
:::tip My tip


@ -1,6 +1,6 @@
{
"name": "@docusaurus/cssnano-preset",
"version": "3.0.1",
"version": "3.1.0",
"description": "Advanced cssnano preset for maximum optimization.",
"main": "lib/index.js",
"license": "MIT",


@ -1,6 +1,6 @@
{
"name": "@docusaurus/logger",
"version": "3.0.1",
"version": "3.1.0",
"description": "An encapsulated logger for semantically formatting console messages.",
"main": "./lib/index.js",
"repository": {


@ -1,6 +1,6 @@
{
"name": "@docusaurus/mdx-loader",
"version": "3.0.1",
"version": "3.1.0",
"description": "Docusaurus Loader for MDX",
"main": "lib/index.js",
"types": "lib/index.d.ts",
@ -20,9 +20,9 @@
"dependencies": {
"@babel/parser": "^7.22.7",
"@babel/traverse": "^7.22.8",
"@docusaurus/logger": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/logger": "3.1.0",
"@docusaurus/utils": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"@mdx-js/mdx": "^3.0.0",
"@slorber/remark-comment": "^1.0.0",
"escape-html": "^1.0.3",
@ -46,7 +46,7 @@
"webpack": "^5.88.1"
},
"devDependencies": {
"@docusaurus/types": "3.0.1",
"@docusaurus/types": "3.1.0",
"@types/escape-html": "^1.0.2",
"@types/mdast": "^4.0.2",
"@types/stringify-object": "^3.3.1",


@ -8,7 +8,7 @@
import fs from 'fs-extra';
import logger from '@docusaurus/logger';
import {
parseFrontMatter,
DEFAULT_PARSE_FRONT_MATTER,
escapePath,
getFileLoaderUtils,
getWebpackLoaderCompilerName,
@ -133,7 +133,7 @@ function extractContentTitleData(data: {
export async function mdxLoader(
this: LoaderContext<Options>,
fileString: string,
fileContent: string,
): Promise<void> {
const compilerName = getWebpackLoaderCompilerName(this);
const callback = this.async();
@ -143,11 +143,15 @@ export async function mdxLoader(
ensureMarkdownConfig(reqOptions);
const {frontMatter} = parseFrontMatter(fileString);
const {frontMatter} = await reqOptions.markdownConfig.parseFrontMatter({
filePath,
fileContent,
defaultParseFrontMatter: DEFAULT_PARSE_FRONT_MATTER,
});
const mdxFrontMatter = validateMDXFrontMatter(frontMatter.mdx);
const preprocessedContent = preprocessor({
fileContent: fileString,
fileContent,
filePath,
admonitions: reqOptions.admonitions,
markdownConfig: reqOptions.markdownConfig,
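
Note: the `parseFrontMatter` hook consumed by this loader is provided through site config. A minimal sketch of the user-facing side, following the shape of the test fixtures later in this commit (the added field is illustrative):

```ts
// docusaurus.config.ts — sketch only; mirrors the fixtures in this commit
import type {Config} from '@docusaurus/types';

const config: Config = {
  title: 'My Site',
  url: 'https://example.com',
  baseUrl: '/',
  markdown: {
    parseFrontMatter: async (params) => {
      // Reuse the default parser, then post-process its result
      const result = await params.defaultParseFrontMatter(params);
      result.frontMatter.custom_frontMatter = 'added by parseFrontMatter';
      return result;
    },
  },
};

export default config;
```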


@ -165,6 +165,7 @@ async function createProcessorFactory() {
const mdxProcessor = createMdxProcessor({
...processorOptions,
remarkRehypeOptions: options.markdownConfig.remarkRehypeOptions,
format,
});


@ -12,17 +12,17 @@ exports[`transformAsset plugin pathname protocol 1`] = `
exports[`transformAsset plugin transform md links to <a /> 1`] = `
"[asset](https://example.com/asset.pdf)
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default} />
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default} />
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default}>asset</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default}>asset</a>
in paragraph <a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default}>asset</a>
in paragraph <a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default}>asset</a>
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset (2).pdf").default}>asset with URL encoded chars</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset (2).pdf").default}>asset with URL encoded chars</a>
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default + '#page=2'}>asset with hash</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default + '#page=2'}>asset with hash</a>
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default} title="Title">asset</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default} title="Title">asset</a>
[page](noUrl.md)
@ -36,24 +36,24 @@ in paragraph <a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file
[assets](/github/!file-loader!/assets.pdf)
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default}>asset</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default}>asset</a>
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static2/asset2.pdf").default}>asset2</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static2/asset2.pdf").default}>asset2</a>
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAsset.pdf").default}>staticAsset.pdf</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAsset.pdf").default}>staticAsset.pdf</a>
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAsset.pdf").default}>@site/static/staticAsset.pdf</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAsset.pdf").default}>@site/static/staticAsset.pdf</a>
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAsset.pdf").default + '#page=2'} title="Title">@site/static/staticAsset.pdf</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAsset.pdf").default + '#page=2'} title="Title">@site/static/staticAsset.pdf</a>
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAsset.pdf").default}>Just staticAsset.pdf</a>, and <a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAsset.pdf").default}>**awesome** staticAsset 2.pdf 'It is really "AWESOME"'</a>, but also <a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAsset.pdf").default}>coded \`staticAsset 3.pdf\`</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAsset.pdf").default}>Just staticAsset.pdf</a>, and <a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAsset.pdf").default}>**awesome** staticAsset 2.pdf 'It is really "AWESOME"'</a>, but also <a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAsset.pdf").default}>coded \`staticAsset 3.pdf\`</a>
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAssetImage.png").default}><img alt="Clickable Docusaurus logo" src={require("!<PROJECT_ROOT>/node_modules/url-loader/dist/cjs.js?limit=10000&name=assets/images/[name]-[contenthash].[ext]&fallback=<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js!./static/staticAssetImage.png").default} width="200" height="200" /></a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/staticAssetImage.png").default}><img alt="Clickable Docusaurus logo" src={require("!<PROJECT_ROOT>/node_modules/url-loader/dist/cjs.js?limit=10000&name=assets/images/[name]-[contenthash].[ext]&fallback=<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js!./static/staticAssetImage.png").default} width="200" height="200" /></a>
<a target="_blank" href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default}><span style={{color: "red"}}>Stylized link to asset file</span></a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default}><span style={{color: "red"}}>Stylized link to asset file</span></a>
<a target="_blank" href={require("./data.raw!=!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./data.json").default}>JSON</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("./data.raw!=!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./data.json").default}>JSON</a>
<a target="_blank" href={require("./static/static-json.raw!=!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/static-json.json").default}>static JSON</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("./static/static-json.raw!=!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/static-json.json").default}>static JSON</a>
"
`;


@ -73,6 +73,34 @@ async function toAssetRequireNode(
value: '_blank',
});
// Assets are not routes, and are required by Webpack already
// They should not trigger the broken link checker
attributes.push({
type: 'mdxJsxAttribute',
name: 'data-noBrokenLinkCheck',
value: {
type: 'mdxJsxAttributeValueExpression',
value: 'true',
data: {
estree: {
type: 'Program',
body: [
{
type: 'ExpressionStatement',
expression: {
type: 'Literal',
value: true,
raw: 'true',
},
},
],
sourceType: 'module',
comments: [],
},
},
},
});
attributes.push({
type: 'mdxJsxAttribute',
name: 'href',


@ -1,6 +1,6 @@
{
"name": "@docusaurus/module-type-aliases",
"version": "3.0.1",
"version": "3.1.0",
"description": "Docusaurus module type aliases.",
"types": "./src/index.d.ts",
"publishConfig": {
@ -13,7 +13,7 @@
},
"dependencies": {
"@docusaurus/react-loadable": "5.5.2",
"@docusaurus/types": "3.0.1",
"@docusaurus/types": "3.1.0",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",


@ -260,6 +260,15 @@ declare module '@docusaurus/useRouteContext' {
export default function useRouteContext(): PluginRouteContext;
}
declare module '@docusaurus/useBrokenLinks' {
export type BrokenLinks = {
collectLink: (link: string) => void;
collectAnchor: (anchor: string) => void;
};
export default function useBrokenLinks(): BrokenLinks;
}
declare module '@docusaurus/useIsBrowser' {
export default function useIsBrowser(): boolean;
}
@ -356,7 +365,9 @@ declare module '@docusaurus/useGlobalData' {
declare module '*.svg' {
import type {ComponentType, SVGProps} from 'react';
const ReactComponent: ComponentType<SVGProps<SVGSVGElement>>;
const ReactComponent: ComponentType<
SVGProps<SVGSVGElement> & {title?: string}
>;
export default ReactComponent;
}
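
Note: these are the client-side declarations behind the broken-anchor collection and the SVG `title` fix. A minimal usage sketch, with an illustrative component and asset path (not part of this commit):

```tsx
// src/components/AnchorHeading.tsx — illustrative only
import React from 'react';
import useBrokenLinks from '@docusaurus/useBrokenLinks';
import Logo from '@site/static/img/logo.svg';

export default function AnchorHeading({id}: {id: string}): JSX.Element {
  // Register the anchor so the broken-link checker knows it exists
  useBrokenLinks().collectAnchor(id);
  return (
    <h2 id={id}>
      {/* `title` is now accepted on inline SVG imports */}
      <Logo title="Site logo" width={24} height={24} /> Section title
    </h2>
  );
}
```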


@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-client-redirects",
"version": "3.0.1",
"version": "3.1.0",
"description": "Client redirects plugin for Docusaurus.",
"main": "lib/index.js",
"types": "lib/index.d.ts",
@ -18,18 +18,18 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/logger": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/utils-common": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/logger": "3.1.0",
"@docusaurus/utils": "3.1.0",
"@docusaurus/utils-common": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"eta": "^2.2.0",
"fs-extra": "^11.1.1",
"lodash": "^4.17.21",
"tslib": "^2.6.0"
},
"devDependencies": {
"@docusaurus/types": "3.0.1"
"@docusaurus/types": "3.1.0"
},
"peerDependencies": {
"react": "^18.0.0",


@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-content-blog",
"version": "3.0.1",
"version": "3.1.0",
"description": "Blog plugin for Docusaurus.",
"main": "lib/index.js",
"types": "src/plugin-content-blog.d.ts",
@ -19,13 +19,13 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/logger": "3.0.1",
"@docusaurus/mdx-loader": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/utils-common": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/logger": "3.1.0",
"@docusaurus/mdx-loader": "3.1.0",
"@docusaurus/types": "3.1.0",
"@docusaurus/utils": "3.1.0",
"@docusaurus/utils-common": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"cheerio": "^1.0.0-rc.12",
"feed": "^4.2.2",
"fs-extra": "^11.1.1",


@ -19,6 +19,7 @@ describe('getBlogPostAuthors', () => {
getBlogPostAuthors({
frontMatter: {},
authorsMap: undefined,
baseUrl: '/',
}),
).toEqual([]);
expect(
@ -27,6 +28,7 @@ describe('getBlogPostAuthors', () => {
authors: [],
},
authorsMap: undefined,
baseUrl: '/',
}),
).toEqual([]);
});
@ -38,6 +40,7 @@ describe('getBlogPostAuthors', () => {
author: 'Sébastien Lorber',
},
authorsMap: undefined,
baseUrl: '/',
}),
).toEqual([{name: 'Sébastien Lorber'}]);
expect(
@ -46,6 +49,7 @@ describe('getBlogPostAuthors', () => {
authorTitle: 'maintainer',
},
authorsMap: undefined,
baseUrl: '/',
}),
).toEqual([{title: 'maintainer'}]);
expect(
@ -54,8 +58,27 @@ describe('getBlogPostAuthors', () => {
authorImageURL: 'https://github.com/slorber.png',
},
authorsMap: undefined,
baseUrl: '/',
}),
).toEqual([{imageURL: 'https://github.com/slorber.png'}]);
expect(
getBlogPostAuthors({
frontMatter: {
authorImageURL: '/img/slorber.png',
},
authorsMap: undefined,
baseUrl: '/',
}),
).toEqual([{imageURL: '/img/slorber.png'}]);
expect(
getBlogPostAuthors({
frontMatter: {
authorImageURL: '/img/slorber.png',
},
authorsMap: undefined,
baseUrl: '/baseURL',
}),
).toEqual([{imageURL: '/baseURL/img/slorber.png'}]);
expect(
getBlogPostAuthors({
frontMatter: {
@ -68,6 +91,7 @@ describe('getBlogPostAuthors', () => {
authorURL: 'https://github.com/slorber2',
},
authorsMap: undefined,
baseUrl: '/',
}),
).toEqual([
{
@ -86,8 +110,69 @@ describe('getBlogPostAuthors', () => {
authors: 'slorber',
},
authorsMap: {slorber: {name: 'Sébastien Lorber'}},
baseUrl: '/',
}),
).toEqual([{key: 'slorber', name: 'Sébastien Lorber'}]);
expect(
getBlogPostAuthors({
frontMatter: {
authors: 'slorber',
},
authorsMap: {
slorber: {
name: 'Sébastien Lorber',
imageURL: 'https://github.com/slorber.png',
},
},
baseUrl: '/',
}),
).toEqual([
{
key: 'slorber',
name: 'Sébastien Lorber',
imageURL: 'https://github.com/slorber.png',
},
]);
expect(
getBlogPostAuthors({
frontMatter: {
authors: 'slorber',
},
authorsMap: {
slorber: {
name: 'Sébastien Lorber',
imageURL: '/img/slorber.png',
},
},
baseUrl: '/',
}),
).toEqual([
{
key: 'slorber',
name: 'Sébastien Lorber',
imageURL: '/img/slorber.png',
},
]);
expect(
getBlogPostAuthors({
frontMatter: {
authors: 'slorber',
},
authorsMap: {
slorber: {
name: 'Sébastien Lorber',
imageURL: '/img/slorber.png',
},
},
baseUrl: '/baseUrl',
}),
).toEqual([
{
key: 'slorber',
name: 'Sébastien Lorber',
imageURL: '/baseUrl/img/slorber.png',
},
]);
});
it('can read authors string[]', () => {
@ -100,6 +185,7 @@ describe('getBlogPostAuthors', () => {
slorber: {name: 'Sébastien Lorber', title: 'maintainer'},
yangshun: {name: 'Yangshun Tay'},
},
baseUrl: '/',
}),
).toEqual([
{key: 'slorber', name: 'Sébastien Lorber', title: 'maintainer'},
@ -114,6 +200,7 @@ describe('getBlogPostAuthors', () => {
authors: {name: 'Sébastien Lorber', title: 'maintainer'},
},
authorsMap: undefined,
baseUrl: '/',
}),
).toEqual([{name: 'Sébastien Lorber', title: 'maintainer'}]);
});
@ -128,6 +215,7 @@ describe('getBlogPostAuthors', () => {
],
},
authorsMap: undefined,
baseUrl: '/',
}),
).toEqual([
{name: 'Sébastien Lorber', title: 'maintainer'},
@ -153,6 +241,7 @@ describe('getBlogPostAuthors', () => {
slorber: {name: 'Sébastien Lorber', title: 'maintainer'},
yangshun: {name: 'Yangshun Tay', title: 'Yangshun title original'},
},
baseUrl: '/',
}),
).toEqual([
{key: 'slorber', name: 'Sébastien Lorber', title: 'maintainer'},
@ -173,6 +262,7 @@ describe('getBlogPostAuthors', () => {
authors: 'slorber',
},
authorsMap: undefined,
baseUrl: '/',
}),
).toThrowErrorMatchingInlineSnapshot(`
"Can't reference blog post authors by a key (such as 'slorber') because no authors map file could be loaded.
@ -187,6 +277,7 @@ describe('getBlogPostAuthors', () => {
authors: 'slorber',
},
authorsMap: {},
baseUrl: '/',
}),
).toThrowErrorMatchingInlineSnapshot(`
"Can't reference blog post authors by a key (such as 'slorber') because no authors map file could be loaded.
@ -205,6 +296,7 @@ describe('getBlogPostAuthors', () => {
yangshun: {name: 'Yangshun Tay'},
jmarcey: {name: 'Joel Marcey'},
},
baseUrl: '/',
}),
).toThrowErrorMatchingInlineSnapshot(`
"Blog author with key "slorber" not found in the authors map file.
@ -225,6 +317,7 @@ describe('getBlogPostAuthors', () => {
yangshun: {name: 'Yangshun Tay'},
jmarcey: {name: 'Joel Marcey'},
},
baseUrl: '/',
}),
).toThrowErrorMatchingInlineSnapshot(`
"Blog author with key "slorber" not found in the authors map file.
@ -245,6 +338,7 @@ describe('getBlogPostAuthors', () => {
yangshun: {name: 'Yangshun Tay'},
jmarcey: {name: 'Joel Marcey'},
},
baseUrl: '/',
}),
).toThrowErrorMatchingInlineSnapshot(`
"Blog author with key "slorber" not found in the authors map file.
@ -262,6 +356,7 @@ describe('getBlogPostAuthors', () => {
author: 'Yangshun Tay',
},
authorsMap: undefined,
baseUrl: '/',
}),
).toThrowErrorMatchingInlineSnapshot(`
"To declare blog post authors, use the 'authors' front matter in priority.
@ -275,6 +370,7 @@ describe('getBlogPostAuthors', () => {
author_title: 'legacy title',
},
authorsMap: {slorber: {name: 'Sébastien Lorber'}},
baseUrl: '/',
}),
).toThrowErrorMatchingInlineSnapshot(`
"To declare blog post authors, use the 'authors' front matter in priority.


@ -8,6 +8,7 @@
import {jest} from '@jest/globals';
import path from 'path';
import fs from 'fs-extra';
import {DEFAULT_PARSE_FRONT_MATTER} from '@docusaurus/utils';
import {DEFAULT_OPTIONS} from '../options';
import {generateBlogPosts} from '../blogUtils';
import {createBlogFeedFiles} from '../feed';
@ -31,6 +32,8 @@ const DefaultI18N: I18n = {
},
};
const markdown = {parseFrontMatter: DEFAULT_PARSE_FRONT_MATTER};
function getBlogContentPaths(siteDir: string): BlogContentPaths {
return {
contentPath: path.resolve(siteDir, 'blog'),
@ -72,6 +75,7 @@ describe.each(['atom', 'rss', 'json'])('%s', (feedType) => {
baseUrl: '/',
url: 'https://docusaurus.io',
favicon: 'image/favicon.ico',
markdown,
};
const outDir = path.join(siteDir, 'build-snap');
@ -110,6 +114,7 @@ describe.each(['atom', 'rss', 'json'])('%s', (feedType) => {
baseUrl: '/myBaseUrl/',
url: 'https://docusaurus.io',
favicon: 'image/favicon.ico',
markdown,
};
// Build is quite difficult to mock, so we built the blog beforehand and
@ -152,6 +157,7 @@ describe.each(['atom', 'rss', 'json'])('%s', (feedType) => {
baseUrl: '/myBaseUrl/',
url: 'https://docusaurus.io',
favicon: 'image/favicon.ico',
markdown,
};
// Build is quite difficult to mock, so we built the blog beforehand and
@ -204,6 +210,7 @@ describe.each(['atom', 'rss', 'json'])('%s', (feedType) => {
baseUrl: '/myBaseUrl/',
url: 'https://docusaurus.io',
favicon: 'image/favicon.ico',
markdown,
};
// Build is quite difficult to mock, so we built the blog beforehand and


@ -16,6 +16,7 @@ import type {
LoadContext,
I18n,
Validate,
MarkdownConfig,
} from '@docusaurus/types';
import type {
BlogPost,
@ -24,6 +25,24 @@ import type {
EditUrlFunction,
} from '@docusaurus/plugin-content-blog';
const markdown: MarkdownConfig = {
format: 'mdx',
mermaid: true,
mdx1Compat: {
comments: true,
headingIds: true,
admonitions: true,
},
parseFrontMatter: async (params) => {
// Reuse the default parser
const result = await params.defaultParseFrontMatter(params);
if (result.frontMatter.title === 'Complex Slug') {
result.frontMatter.custom_frontMatter = 'added by parseFrontMatter';
}
return result;
},
};
function findByTitle(
blogPosts: BlogPost[],
title: string,
@ -81,6 +100,7 @@ const getPlugin = async (
title: 'Hello',
baseUrl: '/',
url: 'https://docusaurus.io',
markdown,
} as DocusaurusConfig;
return pluginContentBlog(
{
@ -242,6 +262,7 @@ describe('blog plugin', () => {
slug: '/hey/my super path/héllô',
title: 'Complex Slug',
tags: ['date', 'complex'],
custom_frontMatter: 'added by parseFrontMatter',
},
tags: [
{


@ -5,7 +5,7 @@
* LICENSE file in the root directory of this source tree.
*/
import {getDataFileData} from '@docusaurus/utils';
import {getDataFileData, normalizeUrl} from '@docusaurus/utils';
import {Joi, URISchema} from '@docusaurus/utils-validation';
import type {BlogContentPaths} from './types';
import type {
@ -68,17 +68,37 @@ export async function getAuthorsMap(params: {
type AuthorsParam = {
frontMatter: BlogPostFrontMatter;
authorsMap: AuthorsMap | undefined;
baseUrl: string;
};
function normalizeImageUrl({
imageURL,
baseUrl,
}: {
imageURL: string | undefined;
baseUrl: string;
}) {
return imageURL?.startsWith('/')
? normalizeUrl([baseUrl, imageURL])
: imageURL;
}
// Legacy v1/early-v2 front matter fields
// We may want to deprecate those in favor of using only frontMatter.authors
function getFrontMatterAuthorLegacy(
frontMatter: BlogPostFrontMatter,
): Author | undefined {
function getFrontMatterAuthorLegacy({
baseUrl,
frontMatter,
}: {
baseUrl: string;
frontMatter: BlogPostFrontMatter;
}): Author | undefined {
const name = frontMatter.author;
const title = frontMatter.author_title ?? frontMatter.authorTitle;
const url = frontMatter.author_url ?? frontMatter.authorURL;
const imageURL = frontMatter.author_image_url ?? frontMatter.authorImageURL;
const imageURL = normalizeImageUrl({
imageURL: frontMatter.author_image_url ?? frontMatter.authorImageURL,
baseUrl,
});
if (name || title || url || imageURL) {
return {
@ -148,14 +168,26 @@ ${Object.keys(authorsMap)
return frontMatterAuthors.map(toAuthor);
}
function fixAuthorImageBaseURL(
authors: Author[],
{baseUrl}: {baseUrl: string},
) {
return authors.map((author) => ({
...author,
imageURL: normalizeImageUrl({imageURL: author.imageURL, baseUrl}),
}));
}
export function getBlogPostAuthors(params: AuthorsParam): Author[] {
const authorLegacy = getFrontMatterAuthorLegacy(params.frontMatter);
const authorLegacy = getFrontMatterAuthorLegacy(params);
const authors = getFrontMatterAuthors(params);
const updatedAuthors = fixAuthorImageBaseURL(authors, params);
if (authorLegacy) {
// Technically, we could allow mixing legacy/authors front matter, but do we
// really want to?
if (authors.length > 0) {
if (updatedAuthors.length > 0) {
throw new Error(
`To declare blog post authors, use the 'authors' front matter in priority.
Don't mix 'authors' with other existing 'author_*' front matter. Choose one or the other, not both at the same time.`,
@ -164,5 +196,5 @@ Don't mix 'authors' with other existing 'author_*' front matter. Choose one or t
return [authorLegacy];
}
return authors;
return updatedAuthors;
}
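
Note: the normalization above only touches site-relative paths. A reduced standalone sketch of the same rule and its expected results, matching the tests earlier in this commit:

```ts
import {normalizeUrl} from '@docusaurus/utils';

// Mirrors normalizeImageUrl above: only site-relative paths receive the baseUrl prefix
function withBaseUrl(
  imageURL: string | undefined,
  baseUrl: string,
): string | undefined {
  return imageURL?.startsWith('/') ? normalizeUrl([baseUrl, imageURL]) : imageURL;
}

withBaseUrl('/img/slorber.png', '/baseUrl'); // => '/baseUrl/img/slorber.png'
withBaseUrl('https://github.com/slorber.png', '/baseUrl'); // => unchanged
```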


@ -11,7 +11,7 @@ import _ from 'lodash';
import logger from '@docusaurus/logger';
import readingTime from 'reading-time';
import {
parseMarkdownString,
parseMarkdownFile,
normalizeUrl,
aliasedSitePath,
getEditUrl,
@ -29,7 +29,7 @@ import {
} from '@docusaurus/utils';
import {validateBlogPostFrontMatter} from './frontMatter';
import {type AuthorsMap, getAuthorsMap, getBlogPostAuthors} from './authors';
import type {LoadContext} from '@docusaurus/types';
import type {LoadContext, ParseFrontMatter} from '@docusaurus/types';
import type {
PluginOptions,
ReadingTimeFunction,
@ -180,10 +180,19 @@ function formatBlogPostDate(
}
}
async function parseBlogPostMarkdownFile(blogSourceAbsolute: string) {
const markdownString = await fs.readFile(blogSourceAbsolute, 'utf-8');
async function parseBlogPostMarkdownFile({
filePath,
parseFrontMatter,
}: {
filePath: string;
parseFrontMatter: ParseFrontMatter;
}) {
const fileContent = await fs.readFile(filePath, 'utf-8');
try {
const result = parseMarkdownString(markdownString, {
const result = await parseMarkdownFile({
filePath,
fileContent,
parseFrontMatter,
removeContentTitle: true,
});
return {
@ -191,7 +200,7 @@ async function parseBlogPostMarkdownFile(blogSourceAbsolute: string) {
frontMatter: validateBlogPostFrontMatter(result.frontMatter),
};
} catch (err) {
logger.error`Error while parsing blog post file path=${blogSourceAbsolute}.`;
logger.error`Error while parsing blog post file path=${filePath}.`;
throw err;
}
}
@ -207,7 +216,10 @@ async function processBlogSourceFile(
authorsMap?: AuthorsMap,
): Promise<BlogPost | undefined> {
const {
siteConfig: {baseUrl},
siteConfig: {
baseUrl,
markdown: {parseFrontMatter},
},
siteDir,
i18n,
} = context;
@ -228,7 +240,10 @@ async function processBlogSourceFile(
const blogSourceAbsolute = path.join(blogDirPath, blogSourceRelative);
const {frontMatter, content, contentTitle, excerpt} =
await parseBlogPostMarkdownFile(blogSourceAbsolute);
await parseBlogPostMarkdownFile({
filePath: blogSourceAbsolute,
parseFrontMatter,
});
const aliasedSource = aliasedSitePath(blogSourceAbsolute, siteDir);
@ -319,7 +334,7 @@ async function processBlogSourceFile(
routeBasePath,
tagsRouteBasePath,
]);
const authors = getBlogPostAuthors({authorsMap, frontMatter});
const authors = getBlogPostAuthors({authorsMap, frontMatter, baseUrl});
return {
id: slug,


@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-content-docs",
"version": "3.0.1",
"version": "3.1.0",
"description": "Docs plugin for Docusaurus.",
"main": "lib/index.js",
"sideEffects": false,
@ -35,13 +35,13 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/logger": "3.0.1",
"@docusaurus/mdx-loader": "3.0.1",
"@docusaurus/module-type-aliases": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/logger": "3.1.0",
"@docusaurus/mdx-loader": "3.1.0",
"@docusaurus/module-type-aliases": "3.1.0",
"@docusaurus/types": "3.1.0",
"@docusaurus/utils": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"@types/react-router-config": "^5.0.7",
"combine-promises": "^1.1.0",
"fs-extra": "^11.1.1",


@ -11,4 +11,16 @@ module.exports = {
url: 'https://your-docusaurus-site.example.com',
baseUrl: '/',
favicon: 'img/favicon.ico',
markdown: {
parseFrontMatter: async (params) => {
// Reuse the default parser
const result = await params.defaultParseFrontMatter(params);
if (result.frontMatter.last_update?.author) {
result.frontMatter.last_update.author =
result.frontMatter.last_update.author +
' (processed by parseFrontMatter)';
}
return result;
},
},
};


@ -463,7 +463,7 @@ exports[`simple website content: data 1`] = `
"frontMatter": {
"title": "Custom Last Update",
"last_update": {
"author": "Custom Author",
"author": "Custom Author (processed by parseFrontMatter)",
"date": "1/1/2000"
}
}
@ -686,7 +686,7 @@ exports[`simple website content: data 1`] = `
"frontMatter": {
"title": "Last Update Author Only",
"last_update": {
"author": "Custom Author"
"author": "Custom Author (processed by parseFrontMatter)"
}
}
}",


@ -567,14 +567,14 @@ describe('simple site', () => {
description: 'Custom last update',
frontMatter: {
last_update: {
author: 'Custom Author',
author: 'Custom Author (processed by parseFrontMatter)',
date: '1/1/2000',
},
title: 'Custom Last Update',
},
lastUpdatedAt: new Date('1/1/2000').getTime() / 1000,
formattedLastUpdatedAt: 'Jan 1, 2000',
lastUpdatedBy: 'Custom Author',
lastUpdatedBy: 'Custom Author (processed by parseFrontMatter)',
sidebarPosition: undefined,
tags: [],
unlisted: false,
@ -607,13 +607,13 @@ describe('simple site', () => {
description: 'Only custom author, so it will still use the date from Git',
frontMatter: {
last_update: {
author: 'Custom Author',
author: 'Custom Author (processed by parseFrontMatter)',
},
title: 'Last Update Author Only',
},
lastUpdatedAt: 1539502055,
formattedLastUpdatedAt: 'Oct 14, 2018',
lastUpdatedBy: 'Custom Author',
lastUpdatedBy: 'Custom Author (processed by parseFrontMatter)',
sidebarPosition: undefined,
tags: [],
unlisted: false,
@ -685,7 +685,7 @@ describe('simple site', () => {
description: 'Custom last update',
frontMatter: {
last_update: {
author: 'Custom Author',
author: 'Custom Author (processed by parseFrontMatter)',
date: '1/1/2000',
},
title: 'Custom Last Update',


@ -15,7 +15,7 @@ import {
getFolderContainingFile,
getContentPathList,
normalizeUrl,
parseMarkdownString,
parseMarkdownFile,
posixPath,
Globby,
normalizeFrontMatterTags,
@ -140,13 +140,23 @@ async function doProcessDocMetadata({
env: DocEnv;
}): Promise<DocMetadataBase> {
const {source, content, contentPath, filePath} = docFile;
const {siteDir, i18n} = context;
const {
siteDir,
i18n,
siteConfig: {
markdown: {parseFrontMatter},
},
} = context;
const {
frontMatter: unsafeFrontMatter,
contentTitle,
excerpt,
} = parseMarkdownString(content);
} = await parseMarkdownFile({
filePath,
fileContent: content,
parseFrontMatter,
});
const frontMatter = validateDocFrontMatter(unsafeFrontMatter);
const {


@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-content-pages",
"version": "3.0.1",
"version": "3.1.0",
"description": "Pages plugin for Docusaurus.",
"main": "lib/index.js",
"types": "src/plugin-content-pages.d.ts",
@ -18,11 +18,11 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/mdx-loader": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/mdx-loader": "3.1.0",
"@docusaurus/types": "3.1.0",
"@docusaurus/utils": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"fs-extra": "^11.1.1",
"tslib": "^2.6.0",
"webpack": "^5.88.1"


@ -11,4 +11,11 @@ module.exports = {
url: 'https://your-docusaurus-site.example.com',
baseUrl: '/',
favicon: 'img/favicon.ico',
markdown: {
parseFrontMatter: async (params) => {
const result = await params.defaultParseFrontMatter(params);
result.frontMatter.custom_frontMatter = 'added by parseFrontMatter';
return result;
},
},
};


@ -14,7 +14,9 @@ exports[`docusaurus-plugin-content-pages loads simple pages 1`] = `
},
{
"description": "Markdown index page",
"frontMatter": {},
"frontMatter": {
"custom_frontMatter": "added by parseFrontMatter",
},
"permalink": "/hello/",
"source": "@site/src/pages/hello/index.md",
"title": "Index",
@ -24,6 +26,7 @@ exports[`docusaurus-plugin-content-pages loads simple pages 1`] = `
{
"description": "my MDX page",
"frontMatter": {
"custom_frontMatter": "added by parseFrontMatter",
"description": "my MDX page",
"title": "MDX page",
},
@ -40,7 +43,9 @@ exports[`docusaurus-plugin-content-pages loads simple pages 1`] = `
},
{
"description": "translated Markdown page",
"frontMatter": {},
"frontMatter": {
"custom_frontMatter": "added by parseFrontMatter",
},
"permalink": "/hello/translatedMd",
"source": "@site/src/pages/hello/translatedMd.md",
"title": undefined,
@ -69,7 +74,9 @@ exports[`docusaurus-plugin-content-pages loads simple pages with french translat
},
{
"description": "Markdown index page",
"frontMatter": {},
"frontMatter": {
"custom_frontMatter": "added by parseFrontMatter",
},
"permalink": "/fr/hello/",
"source": "@site/src/pages/hello/index.md",
"title": "Index",
@ -79,6 +86,7 @@ exports[`docusaurus-plugin-content-pages loads simple pages with french translat
{
"description": "my MDX page",
"frontMatter": {
"custom_frontMatter": "added by parseFrontMatter",
"description": "my MDX page",
"title": "MDX page",
},
@ -95,7 +103,9 @@ exports[`docusaurus-plugin-content-pages loads simple pages with french translat
},
{
"description": "translated Markdown page (fr)",
"frontMatter": {},
"frontMatter": {
"custom_frontMatter": "added by parseFrontMatter",
},
"permalink": "/fr/hello/translatedMd",
"source": "@site/i18n/fr/docusaurus-plugin-content-pages/hello/translatedMd.md",
"title": undefined,


@ -19,7 +19,7 @@ import {
createAbsoluteFilePathMatcher,
normalizeUrl,
DEFAULT_PLUGIN_ID,
parseMarkdownString,
parseMarkdownFile,
isUnlisted,
isDraft,
} from '@docusaurus/utils';
@ -113,7 +113,11 @@ export default function pluginContentPages(
frontMatter: unsafeFrontMatter,
contentTitle,
excerpt,
} = parseMarkdownString(content);
} = await parseMarkdownFile({
filePath: source,
fileContent: content,
parseFrontMatter: siteConfig.markdown.parseFrontMatter,
});
const frontMatter = validatePageFrontMatter(unsafeFrontMatter);
if (isDraft({frontMatter})) {
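
Note: the blog, docs, and pages plugins above all migrate from `parseMarkdownString` to the async `parseMarkdownFile`. A minimal sketch of the new call shape (file path and content are placeholders):

```ts
import {parseMarkdownFile, DEFAULT_PARSE_FRONT_MATTER} from '@docusaurus/utils';

const {frontMatter, contentTitle, excerpt, content} = await parseMarkdownFile({
  filePath: '/site/src/pages/hello.md',
  fileContent: '---\ntitle: Hello\n---\n\n# Hello\n\nSome content',
  // Or pass siteConfig.markdown.parseFrontMatter, as the plugins do above
  parseFrontMatter: DEFAULT_PARSE_FRONT_MATTER,
});
```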


@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-debug",
"version": "3.0.1",
"version": "3.1.0",
"description": "Debug plugin for Docusaurus.",
"main": "lib/index.js",
"types": "src/plugin-debug.d.ts",
@ -20,9 +20,9 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/types": "3.1.0",
"@docusaurus/utils": "3.1.0",
"fs-extra": "^11.1.1",
"react-json-view-lite": "^1.2.0",
"tslib": "^2.6.0"


@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-google-analytics",
"version": "3.0.1",
"version": "3.1.0",
"description": "Global analytics (analytics.js) plugin for Docusaurus.",
"main": "lib/index.js",
"types": "lib/index.d.ts",
@ -18,9 +18,9 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/types": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"tslib": "^2.6.0"
},
"peerDependencies": {


@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-google-gtag",
"version": "3.0.1",
"version": "3.1.0",
"description": "Global Site Tag (gtag.js) plugin for Docusaurus.",
"main": "lib/index.js",
"types": "lib/index.d.ts",
@ -18,9 +18,9 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/types": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"@types/gtag.js": "^0.0.12",
"tslib": "^2.6.0"
},


@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-google-tag-manager",
"version": "3.0.1",
"version": "3.1.0",
"description": "Google Tag Manager (gtm.js) plugin for Docusaurus.",
"main": "lib/index.js",
"types": "lib/index.d.ts",
@ -18,9 +18,9 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/types": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"tslib": "^2.6.0"
},
"peerDependencies": {


@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-ideal-image",
"version": "3.0.1",
"version": "3.1.0",
"description": "Docusaurus Plugin to generate an almost ideal image (responsive, lazy-loading, and low quality placeholder).",
"main": "lib/index.js",
"types": "src/plugin-ideal-image.d.ts",
@ -20,12 +20,12 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/lqip-loader": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/lqip-loader": "3.1.0",
"@docusaurus/responsive-loader": "^1.7.0",
"@docusaurus/theme-translations": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/theme-translations": "3.1.0",
"@docusaurus/types": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"@slorber/react-ideal-image": "^0.0.12",
"react-waypoint": "^10.3.0",
"sharp": "^0.32.3",
@ -33,7 +33,7 @@
"webpack": "^5.88.1"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.0.1",
"@docusaurus/module-type-aliases": "3.1.0",
"fs-extra": "^11.1.0"
},
"peerDependencies": {


@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-pwa",
"version": "3.0.1",
"version": "3.1.0",
"description": "Docusaurus Plugin to add PWA support.",
"main": "lib/index.js",
"types": "src/plugin-pwa.d.ts",
@ -22,12 +22,12 @@
"dependencies": {
"@babel/core": "^7.23.3",
"@babel/preset-env": "^7.23.3",
"@docusaurus/core": "3.0.1",
"@docusaurus/theme-common": "3.0.1",
"@docusaurus/theme-translations": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/theme-common": "3.1.0",
"@docusaurus/theme-translations": "3.1.0",
"@docusaurus/types": "3.1.0",
"@docusaurus/utils": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"babel-loader": "^9.1.3",
"clsx": "^2.0.0",
"core-js": "^3.31.1",
@ -36,12 +36,12 @@
"webpack": "^5.88.1",
"webpack-merge": "^5.9.0",
"webpackbar": "^5.0.2",
"workbox-build": "^6.6.1",
"workbox-precaching": "^6.6.1",
"workbox-window": "^6.6.1"
"workbox-build": "^7.0.0",
"workbox-precaching": "^7.0.0",
"workbox-window": "^7.0.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.0.1",
"@docusaurus/module-type-aliases": "3.1.0",
"fs-extra": "^11.1.0"
},
"peerDependencies": {


@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-sitemap",
"version": "3.0.1",
"version": "3.1.0",
"description": "Simple sitemap generation plugin for Docusaurus.",
"main": "lib/index.js",
"types": "lib/index.d.ts",
@ -18,12 +18,12 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/logger": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/utils-common": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/logger": "3.1.0",
"@docusaurus/types": "3.1.0",
"@docusaurus/utils": "3.1.0",
"@docusaurus/utils-common": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"fs-extra": "^11.1.1",
"sitemap": "^7.1.1",
"tslib": "^2.6.0"


@ -1,6 +1,6 @@
{
"name": "@docusaurus/preset-classic",
"version": "3.0.1",
"version": "3.1.0",
"description": "Classic preset for Docusaurus.",
"main": "lib/index.js",
"types": "lib/index.d.ts",
@ -18,19 +18,19 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/plugin-content-blog": "3.0.1",
"@docusaurus/plugin-content-docs": "3.0.1",
"@docusaurus/plugin-content-pages": "3.0.1",
"@docusaurus/plugin-debug": "3.0.1",
"@docusaurus/plugin-google-analytics": "3.0.1",
"@docusaurus/plugin-google-gtag": "3.0.1",
"@docusaurus/plugin-google-tag-manager": "3.0.1",
"@docusaurus/plugin-sitemap": "3.0.1",
"@docusaurus/theme-classic": "3.0.1",
"@docusaurus/theme-common": "3.0.1",
"@docusaurus/theme-search-algolia": "3.0.1",
"@docusaurus/types": "3.0.1"
"@docusaurus/core": "3.1.0",
"@docusaurus/plugin-content-blog": "3.1.0",
"@docusaurus/plugin-content-docs": "3.1.0",
"@docusaurus/plugin-content-pages": "3.1.0",
"@docusaurus/plugin-debug": "3.1.0",
"@docusaurus/plugin-google-analytics": "3.1.0",
"@docusaurus/plugin-google-gtag": "3.1.0",
"@docusaurus/plugin-google-tag-manager": "3.1.0",
"@docusaurus/plugin-sitemap": "3.1.0",
"@docusaurus/theme-classic": "3.1.0",
"@docusaurus/theme-common": "3.1.0",
"@docusaurus/theme-search-algolia": "3.1.0",
"@docusaurus/types": "3.1.0"
},
"peerDependencies": {
"react": "^18.0.0",


@ -1,6 +1,6 @@
{
"name": "@docusaurus/remark-plugin-npm2yarn",
"version": "3.0.1",
"version": "3.1.0",
"description": "Remark plugin for converting npm commands to Yarn commands as tabs.",
"main": "lib/index.js",
"publishConfig": {


@ -1,6 +1,6 @@
{
"name": "@docusaurus/theme-classic",
"version": "3.0.1",
"version": "3.1.0",
"description": "Classic theme for Docusaurus",
"main": "lib/index.js",
"types": "src/theme-classic.d.ts",
@ -20,18 +20,18 @@
"copy:watch": "node ../../admin/scripts/copyUntypedFiles.js --watch"
},
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/mdx-loader": "3.0.1",
"@docusaurus/module-type-aliases": "3.0.1",
"@docusaurus/plugin-content-blog": "3.0.1",
"@docusaurus/plugin-content-docs": "3.0.1",
"@docusaurus/plugin-content-pages": "3.0.1",
"@docusaurus/theme-common": "3.0.1",
"@docusaurus/theme-translations": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/utils-common": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/mdx-loader": "3.1.0",
"@docusaurus/module-type-aliases": "3.1.0",
"@docusaurus/plugin-content-blog": "3.1.0",
"@docusaurus/plugin-content-docs": "3.1.0",
"@docusaurus/plugin-content-pages": "3.1.0",
"@docusaurus/theme-common": "3.1.0",
"@docusaurus/theme-translations": "3.1.0",
"@docusaurus/types": "3.1.0",
"@docusaurus/utils": "3.1.0",
"@docusaurus/utils-common": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"copy-text-to-clipboard": "^3.2.0",

View file

@ -363,6 +363,14 @@ declare module '@theme/CodeBlock' {
export default function CodeBlock(props: Props): JSX.Element;
}
declare module '@theme/CodeInline' {
import type {ComponentProps} from 'react';
export interface Props extends ComponentProps<'code'> {}
export default function CodeInline(props: Props): JSX.Element;
}
declare module '@theme/CodeBlock/CopyButton' {
export interface Props {
readonly code: string;


@ -0,0 +1,16 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import React from 'react';
import type {Props} from '@theme/CodeInline';
// Simple component used to render inline code blocks
// its purpose is to be swizzled and customized
// MDX 1 used to have an inlineCode comp, see https://mdxjs.com/migrating/v2/
export default function CodeInline(props: Props): JSX.Element {
return <code {...props} />;
}
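
Note: since the comment above names swizzling as the reason this component exists, here is a minimal sketch of a swizzled override (class name and styling are illustrative):

```tsx
// src/theme/CodeInline/index.tsx — illustrative swizzle of @docusaurus/theme-classic
import React from 'react';
import clsx from 'clsx';
import type {Props} from '@theme/CodeInline';

export default function CodeInline({className, ...rest}: Props): JSX.Element {
  // Same rendering as the default component, plus a site-level class for styling
  return <code {...rest} className={clsx('my-inline-code', className)} />;
}
```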


@ -10,11 +10,13 @@ import clsx from 'clsx';
import {translate} from '@docusaurus/Translate';
import {useThemeConfig} from '@docusaurus/theme-common';
import Link from '@docusaurus/Link';
import useBrokenLinks from '@docusaurus/useBrokenLinks';
import type {Props} from '@theme/Heading';
import styles from './styles.module.css';
export default function Heading({as: As, id, ...props}: Props): JSX.Element {
const brokenLinks = useBrokenLinks();
const {
navbar: {hideOnScroll},
} = useThemeConfig();
@ -23,6 +25,8 @@ export default function Heading({as: As, id, ...props}: Props): JSX.Element {
return <As {...props} id={undefined} />;
}
brokenLinks.collectAnchor(id);
const anchorTitle = translate(
{
id: 'theme.common.headingLinkTitle',


@ -8,15 +8,23 @@
import type {ComponentProps} from 'react';
import React from 'react';
import CodeBlock from '@theme/CodeBlock';
import CodeInline from '@theme/CodeInline';
import type {Props} from '@theme/MDXComponents/Code';
export default function MDXCode(props: Props): JSX.Element {
const shouldBeInline = React.Children.toArray(props.children).every(
function shouldBeInline(props: Props) {
return (
// empty code blocks have no props.children,
// see https://github.com/facebook/docusaurus/pull/9704
typeof props.children !== 'undefined' &&
React.Children.toArray(props.children).every(
(el) => typeof el === 'string' && !el.includes('\n'),
)
);
}
return shouldBeInline ? (
<code {...props} />
export default function MDXCode(props: Props): JSX.Element {
return shouldBeInline(props) ? (
<CodeInline {...props} />
) : (
<CodeBlock {...(props as ComponentProps<typeof CodeBlock>)} />
);


@ -1,6 +1,6 @@
{
"name": "@docusaurus/theme-common",
"version": "3.0.1",
"version": "3.1.0",
"description": "Common code for Docusaurus themes.",
"main": "./lib/index.js",
"types": "./lib/index.d.ts",
@ -30,13 +30,13 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/mdx-loader": "3.0.1",
"@docusaurus/module-type-aliases": "3.0.1",
"@docusaurus/plugin-content-blog": "3.0.1",
"@docusaurus/plugin-content-docs": "3.0.1",
"@docusaurus/plugin-content-pages": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/utils-common": "3.0.1",
"@docusaurus/mdx-loader": "3.1.0",
"@docusaurus/module-type-aliases": "3.1.0",
"@docusaurus/plugin-content-blog": "3.1.0",
"@docusaurus/plugin-content-docs": "3.1.0",
"@docusaurus/plugin-content-pages": "3.1.0",
"@docusaurus/utils": "3.1.0",
"@docusaurus/utils-common": "3.1.0",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",
@ -47,8 +47,8 @@
"utility-types": "^3.10.0"
},
"devDependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/types": "3.1.0",
"fs-extra": "^11.1.1",
"lodash": "^4.17.21"
},


@ -17,15 +17,20 @@ const windowSizes = {
type WindowSize = keyof typeof windowSizes;
const DesktopThresholdWidth = 996;
// Note: this value is also hardcoded in Infima
// Both JS and CSS must have the same value
// Updating this JS value alone is not enough
// See https://github.com/facebook/docusaurus/issues/9603
const DesktopBreakpoint = 996;
function getWindowSize() {
function getWindowSize(desktopBreakpoint: number): WindowSize {
if (!ExecutionEnvironment.canUseDOM) {
throw new Error(
'getWindowSize() should only be called after React hydration',
);
}
return window.innerWidth > DesktopThresholdWidth
return window.innerWidth > desktopBreakpoint
? windowSizes.desktop
: windowSizes.mobile;
}
@ -40,7 +45,11 @@ function getWindowSize() {
* with mediaquery). We don't return `undefined` on purpose, to make it more
* explicit.
*/
export function useWindowSize(): WindowSize {
export function useWindowSize({
desktopBreakpoint = DesktopBreakpoint,
}: {
desktopBreakpoint?: number;
} = {}): WindowSize {
const [windowSize, setWindowSize] = useState<WindowSize>(
() =>
// super important to return a constant value to avoid hydration mismatch
@ -50,7 +59,7 @@ export function useWindowSize(): WindowSize {
useEffect(() => {
function updateWindowSize() {
setWindowSize(getWindowSize());
setWindowSize(getWindowSize(desktopBreakpoint));
}
updateWindowSize();
@ -60,7 +69,7 @@ export function useWindowSize(): WindowSize {
return () => {
window.removeEventListener('resize', updateWindowSize);
};
}, []);
}, [desktopBreakpoint]);
return windowSize;
}
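
Note: a short usage sketch of the parametrized hook, assuming it is consumed through the `@docusaurus/theme-common` re-export as elsewhere in the themes (the breakpoint value is arbitrary):

```tsx
import React from 'react';
import {useWindowSize} from '@docusaurus/theme-common';

function CollapsibleSidebar(): JSX.Element | null {
  // Treat anything at or below 1200px as "mobile" for this component only;
  // omitting the argument keeps the 996px default aligned with Infima.
  const windowSize = useWindowSize({desktopBreakpoint: 1200});
  return windowSize === 'mobile' ? null : <nav>Sidebar</nav>;
}
```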


@ -13,18 +13,31 @@ const codeBlockTitleRegex = /title=(?<quote>["'])(?<title>.*?)\1/;
const metastringLinesRangeRegex = /\{(?<range>[\d,-]+)\}/;
// Supported types of highlight comments
const commentPatterns = {
const popularCommentPatterns = {
js: {start: '\\/\\/', end: ''},
jsBlock: {start: '\\/\\*', end: '\\*\\/'},
jsx: {start: '\\{\\s*\\/\\*', end: '\\*\\/\\s*\\}'},
bash: {start: '#', end: ''},
html: {start: '<!--', end: '-->'},
} as const;
const commentPatterns = {
...popularCommentPatterns, // shallow copy is sufficient
// minor comment styles
lua: {start: '--', end: ''},
wasm: {start: '\\;\\;', end: ''},
tex: {start: '%', end: ''},
};
vb: {start: "[']", end: ''},
rem: {start: '[Rr][Ee][Mm]\\b', end: ''},
f90: {start: '!', end: ''}, // Free format only
ml: {start: '\\(\\*', end: '\\*\\)'},
cobol: {start: '\\*>', end: ''}, // Free format only
} as const;
type CommentType = keyof typeof commentPatterns;
const popularCommentTypes = Object.keys(
popularCommentPatterns,
) as CommentType[];
export type MagicCommentConfig = {
className: string;
@ -99,15 +112,34 @@ function getAllMagicCommentDirectiveStyles(
case 'wasm':
return getCommentPattern(['wasm'], magicCommentDirectives);
case 'vb':
case 'vbnet':
case 'vba':
case 'visual-basic':
return getCommentPattern(['vb', 'rem'], magicCommentDirectives);
case 'batch':
return getCommentPattern(['rem'], magicCommentDirectives);
case 'basic': // https://github.com/PrismJS/prism/blob/master/components/prism-basic.js#L3
return getCommentPattern(['rem', 'f90'], magicCommentDirectives);
case 'fsharp':
return getCommentPattern(['js', 'ml'], magicCommentDirectives);
case 'ocaml':
case 'sml':
return getCommentPattern(['ml'], magicCommentDirectives);
case 'fortran':
return getCommentPattern(['f90'], magicCommentDirectives);
case 'cobol':
return getCommentPattern(['cobol'], magicCommentDirectives);
default:
// All comment types except lua, wasm and matlab
return getCommentPattern(
Object.keys(commentPatterns).filter(
(pattern) =>
!['lua', 'wasm', 'tex', 'latex', 'matlab'].includes(pattern),
) as CommentType[],
magicCommentDirectives,
);
// All popular comment types
return getCommentPattern(popularCommentTypes, magicCommentDirectives);
}
}
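For context, the magic-comment directives consumed by getCommentPattern() above are declared in the site's theme config; the per-language patterns added here only change which comment syntaxes those directives are recognized in. A hedged sketch of the standard prism.magicComments option (the values shown are the documented defaults, not something introduced by this commit):

// docusaurus.config.ts (sketch)
export default {
  themeConfig: {
    prism: {
      magicComments: [
        {
          className: 'theme-code-block-highlighted-line',
          line: 'highlight-next-line',
          block: {start: 'highlight-start', end: 'highlight-end'},
        },
      ],
    },
  },
};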

View file

@ -1,6 +1,6 @@
{
"name": "@docusaurus/theme-live-codeblock",
"version": "3.0.1",
"version": "3.1.0",
"description": "Docusaurus live code block component.",
"main": "lib/index.js",
"types": "src/theme-live-codeblock.d.ts",
@ -23,10 +23,10 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/theme-common": "3.0.1",
"@docusaurus/theme-translations": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/theme-common": "3.1.0",
"@docusaurus/theme-translations": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"@philpl/buble": "^0.19.7",
"clsx": "^2.0.0",
"fs-extra": "^11.1.1",
@ -34,7 +34,7 @@
"tslib": "^2.6.0"
},
"devDependencies": {
"@docusaurus/types": "3.0.1",
"@docusaurus/types": "3.1.0",
"@types/buble": "^0.20.1"
},
"peerDependencies": {

View file

@ -24,7 +24,8 @@ declare module '@theme/Playground' {
type LiveProviderProps = React.ComponentProps<typeof LiveProvider>;
export interface Props extends CodeBlockProps, LiveProviderProps {
children: string;
// Allow empty live playgrounds
children?: string;
}
export default function Playground(props: LiveProviderProps): JSX.Element;
}

View file

@ -98,6 +98,10 @@ function EditorWithHeader() {
);
}
// this should rather be a stable function
// see https://github.com/facebook/docusaurus/issues/9630#issuecomment-1855682643
const DEFAULT_TRANSFORM_CODE = (code: string) => `${code};`;
export default function Playground({
children,
transformCode,
@ -116,9 +120,9 @@ export default function Playground({
return (
<div className={styles.playgroundContainer}>
<LiveProvider
code={children.replace(/\n$/, '')}
code={children?.replace(/\n$/, '')}
noInline={noInline}
transformCode={transformCode ?? ((code) => `${code};`)}
transformCode={transformCode ?? DEFAULT_TRANSFORM_CODE}
theme={prismTheme}
{...props}>
{playgroundPosition === 'top' ? (
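The hoisted DEFAULT_TRANSFORM_CODE constant matters because an inline fallback creates a new function identity on every render, which react-live can treat as a prop change (the editor/preview desync referenced in the comment above). A generic sketch of the pattern, not Docusaurus-specific; the Child component here is hypothetical:

import React, {memo} from 'react';

// Hypothetical memoized child: it only re-renders when a prop changes identity.
const Child = memo(function Child({transformCode}: {transformCode: (c: string) => string}) {
  return <pre>{transformCode('const x = 1')}</pre>;
});

// Module-level constant: the same reference on every render of Parent.
const stableTransform = (code: string) => `${code};`;

export function Parent({transform}: {transform?: (code: string) => string}) {
  // An inline fallback such as `transform ?? ((code) => code)` would defeat the
  // memoization above; `transform ?? stableTransform` keeps the reference stable.
  return <Child transformCode={transform ?? stableTransform} />;
}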

View file

@ -1,6 +1,6 @@
{
"name": "@docusaurus/theme-mermaid",
"version": "3.0.1",
"version": "3.1.0",
"description": "Mermaid components for Docusaurus.",
"main": "lib/index.js",
"types": "src/theme-mermaid.d.ts",
@ -33,11 +33,11 @@
"copy:watch": "node ../../admin/scripts/copyUntypedFiles.js --watch"
},
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/module-type-aliases": "3.0.1",
"@docusaurus/theme-common": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/module-type-aliases": "3.1.0",
"@docusaurus/theme-common": "3.1.0",
"@docusaurus/types": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"mermaid": "^10.4.0",
"tslib": "^2.6.0"
},

View file

@ -1,6 +1,6 @@
{
"name": "@docusaurus/theme-search-algolia",
"version": "3.0.1",
"version": "3.1.0",
"description": "Algolia search component for Docusaurus.",
"main": "lib/index.js",
"sideEffects": [
@ -34,13 +34,13 @@
},
"dependencies": {
"@docsearch/react": "^3.5.2",
"@docusaurus/core": "3.0.1",
"@docusaurus/logger": "3.0.1",
"@docusaurus/plugin-content-docs": "3.0.1",
"@docusaurus/theme-common": "3.0.1",
"@docusaurus/theme-translations": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/logger": "3.1.0",
"@docusaurus/plugin-content-docs": "3.1.0",
"@docusaurus/theme-common": "3.1.0",
"@docusaurus/theme-translations": "3.1.0",
"@docusaurus/utils": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"algoliasearch": "^4.18.0",
"algoliasearch-helper": "^3.13.3",
"clsx": "^2.0.0",
@ -51,7 +51,7 @@
"utility-types": "^3.10.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.0.1"
"@docusaurus/module-type-aliases": "3.1.0"
},
"peerDependencies": {
"react": "^18.0.0",

View file

@ -6,6 +6,7 @@
*/
import React, {useCallback, useMemo, useRef, useState} from 'react';
import {createPortal} from 'react-dom';
import {DocSearchButton, useDocSearchKeyboardEvents} from '@docsearch/react';
import Head from '@docusaurus/Head';
import Link from '@docusaurus/Link';
@ -20,7 +21,6 @@ import {
} from '@docusaurus/theme-search-algolia/client';
import Translate from '@docusaurus/Translate';
import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
import {createPortal} from 'react-dom';
import translations from '@theme/SearchTranslations';
import type {AutocompleteState} from '@algolia/autocomplete-core';

View file

@ -4,22 +4,22 @@
"theme.CodeBlock.copied": "Copiado",
"theme.CodeBlock.copy": "Copiar",
"theme.CodeBlock.copyButtonAriaLabel": "Copiar código para a área de transferência",
"theme.CodeBlock.wordWrapToggle": "Toggle word wrap",
"theme.DocSidebarItem.collapseCategoryAriaLabel": "Collapse sidebar category '{label}'",
"theme.DocSidebarItem.expandCategoryAriaLabel": "Expand sidebar category '{label}'",
"theme.ErrorPageContent.title": "This page crashed.",
"theme.ErrorPageContent.tryAgain": "Try again",
"theme.CodeBlock.wordWrapToggle": "Alternar quebra de linha",
"theme.DocSidebarItem.collapseCategoryAriaLabel": "Fechar a categoria lateral '{label}'",
"theme.DocSidebarItem.expandCategoryAriaLabel": "Expandir a categoria lateral '{label}'",
"theme.ErrorPageContent.title": "Esta página deu erro.",
"theme.ErrorPageContent.tryAgain": "Tente novamente",
"theme.NavBar.navAriaLabel": "Main",
"theme.NotFound.p1": "Não foi possível encontrar o que você está procurando.",
"theme.NotFound.p2": "Entre em contato com o proprietário do site que lhe trouxe para cá e lhe informe que o link está quebrado.",
"theme.NotFound.title": "Página não encontrada",
"theme.TOCCollapsible.toggleButtonLabel": "Nessa página",
"theme.admonition.caution": "caution",
"theme.admonition.danger": "danger",
"theme.admonition.caution": "cuidado",
"theme.admonition.danger": "perigo",
"theme.admonition.info": "info",
"theme.admonition.note": "note",
"theme.admonition.tip": "tip",
"theme.admonition.warning": "warning",
"theme.admonition.note": "nota",
"theme.admonition.tip": "dica",
"theme.admonition.warning": "atenção",
"theme.blog.archive.description": "Arquivo",
"theme.blog.archive.title": "Arquivo",
"theme.blog.paginator.navAriaLabel": "Navegação da página de listagem do blog",
@ -30,32 +30,32 @@
"theme.blog.post.paginator.olderPost": "Postagem mais antiga",
"theme.blog.post.plurals": "Uma postagem|{count} postagens",
"theme.blog.post.readMore": "Leia Mais",
"theme.blog.post.readMoreLabel": "Read more about {title}",
"theme.blog.post.readMoreLabel": "Ler mais sobre {title}",
"theme.blog.post.readingTime.plurals": "Leitura de um minuto|Leitura de {readingTime} minutos",
"theme.blog.sidebar.navAriaLabel": "Blog recent posts navigation",
"theme.blog.tagTitle": "{nPosts} marcadas com \"{tagName}\"",
"theme.colorToggle.ariaLabel": "Switch between dark and light mode (currently {mode})",
"theme.colorToggle.ariaLabel.mode.dark": "dark mode",
"theme.colorToggle.ariaLabel.mode.light": "light mode",
"theme.colorToggle.ariaLabel": "Alterar entre os modos claro e escuro (modo {mode} ativado)",
"theme.colorToggle.ariaLabel.mode.dark": "modo escuro",
"theme.colorToggle.ariaLabel.mode.light": "modo claro",
"theme.common.editThisPage": "Editar essa página",
"theme.common.headingLinkTitle": "Link direto para {heading}",
"theme.common.skipToMainContent": "Pular para o conteúdo principal",
"theme.docs.DocCard.categoryDescription": "{count} items",
"theme.docs.breadcrumbs.home": "Home page",
"theme.docs.breadcrumbs.home": "Página Inicial",
"theme.docs.breadcrumbs.navAriaLabel": "Breadcrumbs",
"theme.docs.paginator.navAriaLabel": "Páginas de documentação",
"theme.docs.paginator.next": "Próxima",
"theme.docs.paginator.previous": "Anterior",
"theme.docs.sidebar.closeSidebarButtonAriaLabel": "Close navigation bar",
"theme.docs.sidebar.closeSidebarButtonAriaLabel": "Fechar barra de navegação",
"theme.docs.sidebar.collapseButtonAriaLabel": "Fechar painel lateral",
"theme.docs.sidebar.collapseButtonTitle": "Fechar painel lateral",
"theme.docs.sidebar.expandButtonAriaLabel": "Expandir painel lateral",
"theme.docs.sidebar.expandButtonTitle": "Expandir painel lateral",
"theme.docs.sidebar.navAriaLabel": "Docs sidebar",
"theme.docs.sidebar.toggleSidebarButtonAriaLabel": "Toggle navigation bar",
"theme.docs.sidebar.toggleSidebarButtonAriaLabel": "Alternar a barra de navegação",
"theme.docs.tagDocListPageTitle": "{nDocsTagged} com \"{tagName}\"",
"theme.docs.tagDocListPageTitle.nDocsTagged": "Um documento selecionado|{count} documentos selecionados",
"theme.docs.versionBadge.label": "Version: {versionLabel}",
"theme.docs.versionBadge.label": "Versão: {versionLabel}",
"theme.docs.versions.latestVersionLinkLabel": "última versão",
"theme.docs.versions.latestVersionSuggestionLabel": "Para a documentação atualizada, veja: {latestVersionLink} ({versionLabel}).",
"theme.docs.versions.unmaintainedVersionLabel": "Esta é a documentação para {siteTitle} {versionLabel}, que não é mais mantida ativamente.",
@ -63,12 +63,12 @@
"theme.lastUpdated.atDate": " em {date}",
"theme.lastUpdated.byUser": " por {user}",
"theme.lastUpdated.lastUpdatedAtBy": "Última atualização {atDate}{byUser}",
"theme.navbar.mobileLanguageDropdown.label": "Languages",
"theme.navbar.mobileLanguageDropdown.label": "Linguagens",
"theme.navbar.mobileSidebarSecondaryMenu.backButtonLabel": "← Voltar para o menu principal",
"theme.navbar.mobileVersionsDropdown.label": "Versions",
"theme.navbar.mobileVersionsDropdown.label": "Versões",
"theme.tags.tagsListLabel": "Marcadores:",
"theme.tags.tagsPageLink": "Ver todas os Marcadores",
"theme.tags.tagsPageTitle": "Marcadores",
"theme.unlistedContent.message": "This page is unlisted. Search engines will not index it, and only users having a direct link can access it.",
"theme.unlistedContent.title": "Unlisted page"
"theme.unlistedContent.title": "Página não listada"
}

View file

@ -1,6 +1,6 @@
{
"name": "@docusaurus/theme-translations",
"version": "3.0.1",
"version": "3.1.0",
"description": "Docusaurus theme translations.",
"main": "lib/index.js",
"types": "lib/index.d.ts",
@ -23,8 +23,8 @@
"tslib": "^2.6.0"
},
"devDependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/logger": "3.0.1",
"@docusaurus/core": "3.1.0",
"@docusaurus/logger": "3.1.0",
"lodash": "^4.17.21"
},
"engines": {

View file

@ -1,6 +1,6 @@
{
"name": "@docusaurus/tsconfig",
"version": "3.0.1",
"version": "3.1.0",
"description": "Docusaurus base TypeScript configuration.",
"main": "tsconfig.json",
"publishConfig": {

View file

@ -1,6 +1,6 @@
{
"name": "@docusaurus/types",
"version": "3.0.1",
"version": "3.1.0",
"description": "Common types for Docusaurus packages.",
"types": "./src/index.d.ts",
"publishConfig": {
@ -13,6 +13,7 @@
},
"license": "MIT",
"dependencies": {
"@mdx-js/mdx": "^3.0.0",
"@types/history": "^4.7.11",
"@types/react": "*",
"commander": "^5.1.0",

View file

@ -10,6 +10,10 @@ import type {Required as RequireKeys, DeepPartial} from 'utility-types';
import type {I18nConfig} from './i18n';
import type {PluginConfig, PresetConfig, HtmlTagObject} from './plugin';
import type {ProcessorOptions} from '@mdx-js/mdx';
export type RemarkRehypeOptions = ProcessorOptions['remarkRehypeOptions'];
export type ReportingSeverity = 'ignore' | 'log' | 'warn' | 'throw';
export type ThemeConfig = {
@ -27,6 +31,20 @@ export type MDX1CompatOptions = {
headingIds: boolean;
};
export type ParseFrontMatterParams = {filePath: string; fileContent: string};
export type ParseFrontMatterResult = {
frontMatter: {[key: string]: unknown};
content: string;
};
export type DefaultParseFrontMatter = (
params: ParseFrontMatterParams,
) => Promise<ParseFrontMatterResult>;
export type ParseFrontMatter = (
params: ParseFrontMatterParams & {
defaultParseFrontMatter: DefaultParseFrontMatter;
},
) => Promise<ParseFrontMatterResult>;
export type MarkdownConfig = {
/**
* The Markdown format to use by default.
@ -44,6 +62,14 @@ export type MarkdownConfig = {
*/
format: 'mdx' | 'md' | 'detect';
/**
* A function callback that lets users parse the front matter themselves.
* Gives the opportunity to read it from a different source, or process it.
*
* @see https://github.com/facebook/docusaurus/issues/5568
*/
parseFrontMatter: ParseFrontMatter;
/**
* Allow mermaid language code blocks to be rendered into Mermaid diagrams:
*
@ -69,6 +95,12 @@ export type MarkdownConfig = {
* See also https://github.com/facebook/docusaurus/issues/4029
*/
mdx1Compat: MDX1CompatOptions;
/**
* Ability to provide custom remark-rehype options
* See also https://github.com/remarkjs/remark-rehype#options
*/
remarkRehypeOptions: RemarkRehypeOptions;
};
/**
@ -143,6 +175,13 @@ export type DocusaurusConfig = {
* @default "throw"
*/
onBrokenLinks: ReportingSeverity;
/**
* The behavior of Docusaurus when it detects any broken anchor.
*
* @see https://docusaurus.io/docs/api/docusaurus-config#onBrokenAnchors
* @default "warn"
*/
onBrokenAnchors: ReportingSeverity;
/**
* The behavior of Docusaurus when it detects any broken markdown link.
*
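Taken together, the options introduced in this file can be exercised from a site's docusaurus.config.ts roughly as follows. A hedged sketch: the front matter tweak mirrors the custom front matter parser test elsewhere in this commit, footnoteLabel is one of remark-rehype's documented options, and none of the concrete values are prescribed by this diff:

import type {Config} from '@docusaurus/types';

const config: Config = {
  title: 'My Site',
  url: 'https://example.com',
  baseUrl: '/',
  // New in 3.1: report links that point at anchors missing on the target page.
  onBrokenAnchors: 'warn',
  markdown: {
    // Post-process front matter after the default gray-matter parsing.
    parseFrontMatter: async (params) => {
      const result = await params.defaultParseFrontMatter(params);
      result.frontMatter.extra = 'value'; // illustrative transformation only
      return result;
    },
    // Forwarded to remark-rehype.
    remarkRehypeOptions: {footnoteLabel: 'Notes'},
  },
};

export default config;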

View file

@ -9,6 +9,8 @@ export {
ReportingSeverity,
ThemeConfig,
MarkdownConfig,
DefaultParseFrontMatter,
ParseFrontMatter,
DocusaurusConfig,
Config,
} from './config';

View file

@ -1,6 +1,6 @@
{
"name": "@docusaurus/utils-common",
"version": "3.0.1",
"version": "3.1.0",
"description": "Common (Node/Browser) utility functions for Docusaurus packages.",
"main": "./lib/index.js",
"types": "./lib/index.d.ts",

View file

@ -1,6 +1,6 @@
{
"name": "@docusaurus/utils-validation",
"version": "3.0.1",
"version": "3.1.0",
"description": "Node validation utility functions for Docusaurus packages.",
"main": "./lib/index.js",
"types": "./lib/index.d.ts",
@ -18,8 +18,8 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.0.1",
"@docusaurus/utils": "3.0.1",
"@docusaurus/logger": "3.1.0",
"@docusaurus/utils": "3.1.0",
"joi": "^17.9.2",
"js-yaml": "^4.1.0",
"tslib": "^2.6.0"

View file

@ -1,6 +1,6 @@
{
"name": "@docusaurus/utils",
"version": "3.0.1",
"version": "3.1.0",
"description": "Node utility functions for Docusaurus packages.",
"main": "./lib/index.js",
"types": "./lib/index.d.ts",
@ -18,7 +18,7 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.0.1",
"@docusaurus/logger": "3.1.0",
"@svgr/webpack": "^6.5.1",
"escape-string-regexp": "^4.0.0",
"file-loader": "^6.2.0",
@ -40,7 +40,7 @@
"node": ">=18.0"
},
"devDependencies": {
"@docusaurus/types": "3.0.1",
"@docusaurus/types": "3.1.0",
"@types/dedent": "^0.7.0",
"@types/github-slugger": "^1.3.0",
"@types/micromatch": "^4.0.2",

View file

@ -176,6 +176,7 @@ exports[`replaceMarkdownLinks replaces links with same title as URL 1`] = `
"brokenMarkdownLinks": [],
"newContent": "
[foo.md](/docs/foo)
[./foo.md](</docs/foo>)
[./foo.md](/docs/foo)
[foo.md](/docs/foo)
[./foo.md](/docs/foo)

View file

@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`parseMarkdownString deletes only first heading 1`] = `
exports[`parseMarkdownFile deletes only first heading 1`] = `
{
"content": "# Markdown Title
@ -15,7 +15,7 @@ test test test # test bar
}
`;
exports[`parseMarkdownString deletes only first heading 2 1`] = `
exports[`parseMarkdownFile deletes only first heading 2 1`] = `
{
"content": "# test
@ -30,7 +30,7 @@ test3",
}
`;
exports[`parseMarkdownString does not warn for duplicate title if markdown title is not at the top 1`] = `
exports[`parseMarkdownFile does not warn for duplicate title if markdown title is not at the top 1`] = `
{
"content": "foo
@ -43,7 +43,7 @@ exports[`parseMarkdownString does not warn for duplicate title if markdown title
}
`;
exports[`parseMarkdownString handles code blocks 1`] = `
exports[`parseMarkdownFile handles code blocks 1`] = `
{
"content": "\`\`\`js
code
@ -56,7 +56,7 @@ Content",
}
`;
exports[`parseMarkdownString handles code blocks 2`] = `
exports[`parseMarkdownFile handles code blocks 2`] = `
{
"content": "\`\`\`\`js
Foo
@ -73,7 +73,7 @@ Content",
}
`;
exports[`parseMarkdownString handles code blocks 3`] = `
exports[`parseMarkdownFile handles code blocks 3`] = `
{
"content": "\`\`\`\`js
Foo
@ -88,7 +88,7 @@ Content",
}
`;
exports[`parseMarkdownString ignores markdown title if its not a first text 1`] = `
exports[`parseMarkdownFile ignores markdown title if its not a first text 1`] = `
{
"content": "foo
# test",
@ -98,7 +98,21 @@ exports[`parseMarkdownString ignores markdown title if its not a first text 1`]
}
`;
exports[`parseMarkdownString parse markdown with front matter 1`] = `
exports[`parseMarkdownFile parse markdown with custom front matter parser 1`] = `
{
"content": "Some text",
"contentTitle": undefined,
"excerpt": "Some text",
"frontMatter": {
"age": 84,
"extra": "value",
"great": true,
"title": "Frontmatter title",
},
}
`;
exports[`parseMarkdownFile parse markdown with front matter 1`] = `
{
"content": "Some text",
"contentTitle": undefined,
@ -109,7 +123,7 @@ exports[`parseMarkdownString parse markdown with front matter 1`] = `
}
`;
exports[`parseMarkdownString parses first heading as contentTitle 1`] = `
exports[`parseMarkdownFile parses first heading as contentTitle 1`] = `
{
"content": "# Markdown Title
@ -120,7 +134,7 @@ Some text",
}
`;
exports[`parseMarkdownString parses front-matter and ignore h2 1`] = `
exports[`parseMarkdownFile parses front-matter and ignore h2 1`] = `
{
"content": "## test",
"contentTitle": undefined,
@ -131,7 +145,7 @@ exports[`parseMarkdownString parses front-matter and ignore h2 1`] = `
}
`;
exports[`parseMarkdownString parses title only 1`] = `
exports[`parseMarkdownFile parses title only 1`] = `
{
"content": "# test",
"contentTitle": "test",
@ -140,7 +154,7 @@ exports[`parseMarkdownString parses title only 1`] = `
}
`;
exports[`parseMarkdownString parses title only alternate 1`] = `
exports[`parseMarkdownFile parses title only alternate 1`] = `
{
"content": "test
===",
@ -150,7 +164,7 @@ exports[`parseMarkdownString parses title only alternate 1`] = `
}
`;
exports[`parseMarkdownString reads front matter only 1`] = `
exports[`parseMarkdownFile reads front matter only 1`] = `
{
"content": "",
"contentTitle": undefined,
@ -161,7 +175,7 @@ exports[`parseMarkdownString reads front matter only 1`] = `
}
`;
exports[`parseMarkdownString warns about duplicate titles (front matter + markdown alternate) 1`] = `
exports[`parseMarkdownFile warns about duplicate titles (front matter + markdown alternate) 1`] = `
{
"content": "Markdown Title alternate
================
@ -175,7 +189,7 @@ Some text",
}
`;
exports[`parseMarkdownString warns about duplicate titles (front matter + markdown) 1`] = `
exports[`parseMarkdownFile warns about duplicate titles (front matter + markdown) 1`] = `
{
"content": "# Markdown Title
@ -188,7 +202,7 @@ Some text",
}
`;
exports[`parseMarkdownString warns about duplicate titles 1`] = `
exports[`parseMarkdownFile warns about duplicate titles 1`] = `
{
"content": "# test",
"contentTitle": "test",

View file

@ -231,6 +231,7 @@ The following operations are defined for [URI]s:
},
fileString: `
[foo.md](foo.md)
[./foo.md](<./foo.md>)
[./foo.md](./foo.md)
[foo.md](./foo.md)
[./foo.md](foo.md)

View file

@ -9,12 +9,14 @@ import dedent from 'dedent';
import {
createExcerpt,
parseMarkdownContentTitle,
parseMarkdownString,
parseMarkdownHeadingId,
writeMarkdownHeadingId,
escapeMarkdownHeadingIds,
unwrapMdxCodeBlocks,
admonitionTitleToDirectiveLabel,
parseMarkdownFile,
DEFAULT_PARSE_FRONT_MATTER,
parseFileContentFrontMatter,
} from '../markdownUtils';
describe('createExcerpt', () => {
@ -623,32 +625,110 @@ Lorem Ipsum
});
});
describe('parseMarkdownString', () => {
it('parse markdown with front matter', () => {
expect(
parseMarkdownString(dedent`
describe('parseFileContentFrontMatter', () => {
function test(fileContent: string) {
return parseFileContentFrontMatter(fileContent);
}
it('can parse front matter', () => {
const input = dedent`
---
title: Frontmatter title
author:
age: 42
birth: 2000-07-23
---
Some text
`;
const expectedResult = {
content: 'Some text',
frontMatter: {
title: 'Frontmatter title',
author: {age: 42, birth: new Date('2000-07-23')},
},
};
const result = test(input) as typeof expectedResult;
expect(result).toEqual(expectedResult);
expect(result.frontMatter.author.birth).toBeInstanceOf(Date);
// A regression test, ensure we don't return gray-matter cached objects
result.frontMatter.title = 'modified';
// @ts-expect-error: ok
result.frontMatter.author.age = 53;
expect(test(input)).toEqual(expectedResult);
});
});
describe('parseMarkdownFile', () => {
async function test(
fileContent: string,
options?: Partial<Parameters<typeof parseMarkdownFile>>[0],
) {
return parseMarkdownFile({
fileContent,
filePath: 'some-file-path.mdx',
parseFrontMatter: DEFAULT_PARSE_FRONT_MATTER,
...options,
});
}
it('parse markdown with front matter', async () => {
await expect(
test(dedent`
---
title: Frontmatter title
---
Some text
`),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('parses first heading as contentTitle', () => {
expect(
parseMarkdownString(dedent`
it('parse markdown with custom front matter parser', async () => {
await expect(
test(
dedent`
---
title: Frontmatter title
age: 42
---
Some text
`,
{
parseFrontMatter: async (params) => {
const result = await params.defaultParseFrontMatter(params);
return {
...result,
frontMatter: {
...result.frontMatter,
age: result.frontMatter.age * 2,
extra: 'value',
great: true,
},
};
},
},
),
).resolves.toMatchSnapshot();
});
it('parses first heading as contentTitle', async () => {
await expect(
test(dedent`
# Markdown Title
Some text
`),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('warns about duplicate titles (front matter + markdown)', () => {
expect(
parseMarkdownString(dedent`
it('warns about duplicate titles (front matter + markdown)', async () => {
await expect(
test(dedent`
---
title: Frontmatter title
---
@ -657,12 +737,12 @@ describe('parseMarkdownString', () => {
Some text
`),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('warns about duplicate titles (front matter + markdown alternate)', () => {
expect(
parseMarkdownString(dedent`
it('warns about duplicate titles (front matter + markdown alternate)', async () => {
await expect(
test(dedent`
---
title: Frontmatter title
---
@ -672,12 +752,12 @@ describe('parseMarkdownString', () => {
Some text
`),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('does not warn for duplicate title if markdown title is not at the top', () => {
expect(
parseMarkdownString(dedent`
it('does not warn for duplicate title if markdown title is not at the top', async () => {
await expect(
test(dedent`
---
title: Frontmatter title
---
@ -686,12 +766,12 @@ describe('parseMarkdownString', () => {
# Markdown Title
`),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('deletes only first heading', () => {
expect(
parseMarkdownString(dedent`
it('deletes only first heading', async () => {
await expect(
test(dedent`
# Markdown Title
test test test # test bar
@ -700,12 +780,12 @@ describe('parseMarkdownString', () => {
### Markdown Title h3
`),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('parses front-matter and ignore h2', () => {
expect(
parseMarkdownString(
it('parses front-matter and ignore h2', async () => {
await expect(
test(
dedent`
---
title: Frontmatter title
@ -713,55 +793,55 @@ describe('parseMarkdownString', () => {
## test
`,
),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('reads front matter only', () => {
expect(
parseMarkdownString(dedent`
it('reads front matter only', async () => {
await expect(
test(dedent`
---
title: test
---
`),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('parses title only', () => {
expect(parseMarkdownString('# test')).toMatchSnapshot();
it('parses title only', async () => {
await expect(test('# test')).resolves.toMatchSnapshot();
});
it('parses title only alternate', () => {
expect(
parseMarkdownString(dedent`
it('parses title only alternate', async () => {
await expect(
test(dedent`
test
===
`),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('warns about duplicate titles', () => {
expect(
parseMarkdownString(dedent`
it('warns about duplicate titles', async () => {
await expect(
test(dedent`
---
title: Frontmatter title
---
# test
`),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('ignores markdown title if its not a first text', () => {
expect(
parseMarkdownString(dedent`
it('ignores markdown title if its not a first text', async () => {
await expect(
test(dedent`
foo
# test
`),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('deletes only first heading 2', () => {
expect(
parseMarkdownString(dedent`
it('deletes only first heading 2', async () => {
await expect(
test(dedent`
# test
test test test test test test
@ -770,21 +850,21 @@ describe('parseMarkdownString', () => {
### test
test3
`),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('handles code blocks', () => {
expect(
parseMarkdownString(dedent`
it('handles code blocks', async () => {
await expect(
test(dedent`
\`\`\`js
code
\`\`\`
Content
`),
).toMatchSnapshot();
expect(
parseMarkdownString(dedent`
).resolves.toMatchSnapshot();
await expect(
test(dedent`
\`\`\`\`js
Foo
\`\`\`diff
@ -795,9 +875,9 @@ describe('parseMarkdownString', () => {
Content
`),
).toMatchSnapshot();
expect(
parseMarkdownString(dedent`
).resolves.toMatchSnapshot();
await expect(
test(dedent`
\`\`\`\`js
Foo
\`\`\`diff
@ -806,17 +886,17 @@ describe('parseMarkdownString', () => {
Content
`),
).toMatchSnapshot();
).resolves.toMatchSnapshot();
});
it('throws for invalid front matter', () => {
expect(() =>
parseMarkdownString(dedent`
it('throws for invalid front matter', async () => {
await expect(
test(dedent`
---
foo: f: a
---
`),
).toThrowErrorMatchingInlineSnapshot(`
).rejects.toThrowErrorMatchingInlineSnapshot(`
"incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line at line 2, column 7:
foo: f: a
^"

View file

@ -18,6 +18,8 @@ import {
buildSshUrl,
buildHttpsUrl,
hasSSHProtocol,
parseURLPath,
serializeURLPath,
} from '../urlUtils';
describe('normalizeUrl', () => {
@ -232,6 +234,137 @@ describe('removeTrailingSlash', () => {
});
});
describe('parseURLPath', () => {
it('parse and resolve pathname', () => {
expect(parseURLPath('')).toEqual({
pathname: '/',
search: undefined,
hash: undefined,
});
expect(parseURLPath('/')).toEqual({
pathname: '/',
search: undefined,
hash: undefined,
});
expect(parseURLPath('/page')).toEqual({
pathname: '/page',
search: undefined,
hash: undefined,
});
expect(parseURLPath('/dir1/page')).toEqual({
pathname: '/dir1/page',
search: undefined,
hash: undefined,
});
expect(parseURLPath('/dir1/dir2/./../page')).toEqual({
pathname: '/dir1/page',
search: undefined,
hash: undefined,
});
expect(parseURLPath('/dir1/dir2/../..')).toEqual({
pathname: '/',
search: undefined,
hash: undefined,
});
expect(parseURLPath('/dir1/dir2/../../..')).toEqual({
pathname: '/',
search: undefined,
hash: undefined,
});
expect(parseURLPath('./dir1/dir2./../page', '/dir3/dir4/page2')).toEqual({
pathname: '/dir3/dir4/dir1/page',
search: undefined,
hash: undefined,
});
});
it('parse query string', () => {
expect(parseURLPath('/page')).toEqual({
pathname: '/page',
search: undefined,
hash: undefined,
});
expect(parseURLPath('/page?')).toEqual({
pathname: '/page',
search: '',
hash: undefined,
});
expect(parseURLPath('/page?test')).toEqual({
pathname: '/page',
search: 'test',
hash: undefined,
});
expect(parseURLPath('/page?age=42&great=true')).toEqual({
pathname: '/page',
search: 'age=42&great=true',
hash: undefined,
});
});
it('parse hash', () => {
expect(parseURLPath('/page')).toEqual({
pathname: '/page',
search: undefined,
hash: undefined,
});
expect(parseURLPath('/page#')).toEqual({
pathname: '/page',
search: undefined,
hash: '',
});
expect(parseURLPath('/page#anchor')).toEqual({
pathname: '/page',
search: undefined,
hash: 'anchor',
});
});
it('parse fancy real-world edge cases', () => {
expect(parseURLPath('/page?#')).toEqual({
pathname: '/page',
search: '',
hash: '',
});
expect(
parseURLPath('dir1/dir2/../page?age=42#anchor', '/dir3/page2'),
).toEqual({
pathname: '/dir3/dir1/page',
search: 'age=42',
hash: 'anchor',
});
});
});
describe('serializeURLPath', () => {
function test(input: string, base?: string, expectedOutput?: string) {
expect(serializeURLPath(parseURLPath(input, base))).toEqual(
expectedOutput ?? input,
);
}
it('works for already resolved paths', () => {
test('/');
test('/dir1/page');
test('/dir1/page?');
test('/dir1/page#');
test('/dir1/page?#');
test('/dir1/page?age=42#anchor');
});
it('works for relative paths', () => {
test('', undefined, '/');
test('', '/dir1/dir2/page2', '/dir1/dir2/page2');
test('page', '/dir1/dir2/page2', '/dir1/dir2/page');
test('../page', '/dir1/dir2/page2', '/dir1/page');
test('/dir1/dir2/../page', undefined, '/dir1/page');
test(
'/dir1/dir2/../page?age=42#anchor',
undefined,
'/dir1/page?age=42#anchor',
);
});
});
describe('resolvePathname', () => {
it('works', () => {
// These tests are directly copied from https://github.com/mjackson/resolve-pathname/blob/master/modules/__tests__/resolvePathname-test.js

View file

@ -83,7 +83,9 @@ export const DEFAULT_I18N_DIR_NAME = 'i18n';
export const CODE_TRANSLATIONS_FILE_NAME = 'code.json';
/** Dev server opens on this port by default. */
export const DEFAULT_PORT = 3000;
export const DEFAULT_PORT = process.env.PORT
? parseInt(process.env.PORT, 10)
: 3000;
/** Default plugin ID. */
export const DEFAULT_PLUGIN_ID = 'default';

View file

@ -48,6 +48,8 @@ export {
encodePath,
isValidPathname,
resolvePathname,
parseURLPath,
serializeURLPath,
addLeadingSlash,
addTrailingSlash,
removeTrailingSlash,
@ -55,6 +57,7 @@ export {
buildHttpsUrl,
buildSshUrl,
} from './urlUtils';
export type {URLPath} from './urlUtils';
export {
type Tag,
type TagsListItem,
@ -70,9 +73,9 @@ export {
unwrapMdxCodeBlocks,
admonitionTitleToDirectiveLabel,
createExcerpt,
parseFrontMatter,
DEFAULT_PARSE_FRONT_MATTER,
parseMarkdownContentTitle,
parseMarkdownString,
parseMarkdownFile,
writeMarkdownHeadingId,
type WriteHeadingIDOptions,
} from './markdownUtils';

View file

@ -128,7 +128,7 @@ export function replaceMarkdownLinks<T extends ContentPaths>({
const linkSuffixPattern = '(?:\\?[^#>\\s]+)?(?:#[^>\\s]+)?';
const linkCapture = (forbidden: string) =>
`((?!https?://|@site/)[^${forbidden}#?]+)`;
const linkURLPattern = `(?:${linkCapture(
const linkURLPattern = `(?:(?!<)${linkCapture(
'()\\s',
)}${linkSuffixPattern}|<${linkCapture('>')}${linkSuffixPattern}>)`;
const linkPattern = new RegExp(

View file

@ -8,6 +8,10 @@
import logger from '@docusaurus/logger';
import matter from 'gray-matter';
import {createSlugger, type Slugger, type SluggerOptions} from './slugger';
import type {
ParseFrontMatter,
DefaultParseFrontMatter,
} from '@docusaurus/types';
// Some utilities for parsing Markdown content. These things are only used on
// server-side when we infer metadata like `title` and `description` from the
@ -214,19 +218,40 @@ export function createExcerpt(fileString: string): string | undefined {
* ---
* ```
*/
export function parseFrontMatter(markdownFileContent: string): {
export function parseFileContentFrontMatter(fileContent: string): {
/** Front matter as parsed by gray-matter. */
frontMatter: {[key: string]: unknown};
/** The remaining content, trimmed. */
content: string;
} {
const {data, content} = matter(markdownFileContent);
// TODO Docusaurus v4: replace gray-matter by a better lib
// gray-matter is unmaintained, not flexible, and the code doesn't look good
const {data, content} = matter(fileContent);
// gray-matter has an undocumented front matter caching behavior
// https://github.com/jonschlinkert/gray-matter/blob/ce67a86dba419381db0dd01cc84e2d30a1d1e6a5/index.js#L39
// Unfortunately, this becomes a problem when we mutate returned front matter
// We want to make it possible as part of the parseFrontMatter API
// So we make it safe to mutate by always providing a deep copy
const frontMatter =
// And of course structuredClone() doesn't work well with Date in Jest...
// See https://github.com/jestjs/jest/issues/2549
// So we parse again for tests with a {} option object
// This undocumented empty option object disables gray-matter caching.
process.env.JEST_WORKER_ID
? matter(fileContent, {}).data
: structuredClone(data);
return {
frontMatter: data,
frontMatter,
content: content.trim(),
};
}
export const DEFAULT_PARSE_FRONT_MATTER: DefaultParseFrontMatter = async (
params,
) => parseFileContentFrontMatter(params.fileContent);
function toTextContentTitle(contentTitle: string): string {
return contentTitle.replace(/`(?<text>[^`]*)`/g, '$<text>');
}
@ -309,10 +334,16 @@ export function parseMarkdownContentTitle(
* @throws Throws when `parseFrontMatter` throws, usually because of invalid
* syntax.
*/
export function parseMarkdownString(
markdownFileContent: string,
options?: ParseMarkdownContentTitleOptions,
): {
export async function parseMarkdownFile({
filePath,
fileContent,
parseFrontMatter,
removeContentTitle,
}: {
filePath: string;
fileContent: string;
parseFrontMatter: ParseFrontMatter;
} & ParseMarkdownContentTitleOptions): Promise<{
/** @see {@link parseFrontMatter} */
frontMatter: {[key: string]: unknown};
/** @see {@link parseMarkdownContentTitle} */
@ -324,14 +355,18 @@ export function parseMarkdownString(
* the `removeContentTitle` option.
*/
content: string;
} {
}> {
try {
const {frontMatter, content: contentWithoutFrontMatter} =
parseFrontMatter(markdownFileContent);
await parseFrontMatter({
filePath,
fileContent,
defaultParseFrontMatter: DEFAULT_PARSE_FRONT_MATTER,
});
const {content, contentTitle} = parseMarkdownContentTitle(
contentWithoutFrontMatter,
options,
{removeContentTitle},
);
const excerpt = createExcerpt(content);

View file

@ -165,14 +165,73 @@ export function isValidPathname(str: string): boolean {
}
}
export type URLPath = {pathname: string; search?: string; hash?: string};
// Let's name the concept of (pathname + search + hash) as URLPath
// See also https://twitter.com/kettanaito/status/1741768992866308120
// Note: this function also resolves relative pathnames while parsing!
export function parseURLPath(urlPath: string, fromPath?: string): URLPath {
function parseURL(url: string, base?: string | URL): URL {
try {
// A possible alternative? https://github.com/unjs/ufo#url
return new URL(url, base ?? 'https://example.com');
} catch (e) {
throw new Error(
`Can't parse URL ${url}${base ? ` with base ${base}` : ''}`,
{cause: e},
);
}
}
const base = fromPath ? parseURL(fromPath) : undefined;
const url = parseURL(urlPath, base);
const {pathname} = url;
// Fixes annoying url.search behavior
// "" => undefined
// "?" => ""
// "?param => "param"
const search = url.search
? url.search.slice(1)
: urlPath.includes('?')
? ''
: undefined;
// Fixes annoying url.hash behavior
// "" => undefined
// "#" => ""
// "?param => "param"
const hash = url.hash
? url.hash.slice(1)
: urlPath.includes('#')
? ''
: undefined;
return {
pathname,
search,
hash,
};
}
export function serializeURLPath(urlPath: URLPath): string {
const search = urlPath.search === undefined ? '' : `?${urlPath.search}`;
const hash = urlPath.hash === undefined ? '' : `#${urlPath.hash}`;
return `${urlPath.pathname}${search}${hash}`;
}
/**
* Resolve pathnames and fail-fast if resolution fails. Uses standard URL
* semantics (provided by `resolve-pathname` which is used internally by React
* router)
*/
export function resolvePathname(to: string, from?: string): string {
// TODO do we really need resolve-pathname lib anymore?
// possible alternative: decodeURI(parseURLPath(to, from).pathname);
return resolvePathnameUnsafe(to, from);
}
/** Appends a leading slash to `str`, if one doesn't exist. */
export function addLeadingSlash(str: string): string {
return addPrefix(str, '/');
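As a quick reference for the two helpers added above, here is how parsing and re-serialization compose; a sketch consistent with the unit tests earlier in this commit (the example paths are illustrative):

import {parseURLPath, serializeURLPath} from '@docusaurus/utils';

// Relative pathnames are resolved against the optional second argument.
const parsed = parseURLPath('dir2/../intro?version=3.1#install', '/docs/page');
// => {pathname: '/docs/intro', search: 'version=3.1', hash: 'install'}

// Re-assembles the three parts, preserving the ""-vs-undefined distinction.
const roundTrip = serializeURLPath(parsed); // '/docs/intro?version=3.1#install'
console.log(parsed, roundTrip);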

View file

@ -218,6 +218,9 @@ cli.arguments('<command>').action((cmd) => {
logger.error` Unknown command name=${cmd}.`;
});
// === The above is the commander configuration ===
// They don't trigger any code execution until cli.parse() is called below
/**
* @param {string | undefined} command
*/
@ -237,12 +240,29 @@ function isInternalCommand(command) {
);
}
if (!isInternalCommand(process.argv.slice(2)[0])) {
await externalCommand(cli);
// process.argv always looks like this:
// [
// '/path/to/node',
// '/path/to/docusaurus.mjs',
// '<subcommand>',
// ...subcommandArgs
// ]
// There is no subcommand
// TODO: can we use commander to handle this case?
if (process.argv.length < 3 || process.argv[2]?.startsWith('--')) {
cli.outputHelp();
process.exit(1);
}
if (!process.argv.slice(2).length) {
cli.outputHelp();
// There is an unrecognized subcommand
// Let plugins extend the CLI before parsing
if (!isInternalCommand(process.argv[2])) {
// TODO: in this step, we must assume default site structure because there's
// no way to know the siteDir/config yet. Maybe the root cli should be
// responsible for parsing these arguments?
// https://github.com/facebook/docusaurus/issues/8903
await externalCommand(cli);
}
cli.parse(process.argv);

View file

@ -1,7 +1,7 @@
{
"name": "@docusaurus/core",
"description": "Easy to Maintain Open Source Documentation Websites",
"version": "3.0.1",
"version": "3.1.0",
"license": "MIT",
"publishConfig": {
"access": "public"
@ -43,13 +43,13 @@
"@babel/runtime": "^7.22.6",
"@babel/runtime-corejs3": "^7.22.6",
"@babel/traverse": "^7.22.8",
"@docusaurus/cssnano-preset": "3.0.1",
"@docusaurus/logger": "3.0.1",
"@docusaurus/mdx-loader": "3.0.1",
"@docusaurus/cssnano-preset": "3.1.0",
"@docusaurus/logger": "3.1.0",
"@docusaurus/mdx-loader": "3.1.0",
"@docusaurus/react-loadable": "5.5.2",
"@docusaurus/utils": "3.0.1",
"@docusaurus/utils-common": "3.0.1",
"@docusaurus/utils-validation": "3.0.1",
"@docusaurus/utils": "3.1.0",
"@docusaurus/utils-common": "3.1.0",
"@docusaurus/utils-validation": "3.1.0",
"@slorber/static-site-generator-webpack-plugin": "^4.0.7",
"@svgr/webpack": "^6.5.1",
"autoprefixer": "^10.4.14",
@ -104,8 +104,8 @@
"webpackbar": "^5.0.2"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.0.1",
"@docusaurus/types": "3.0.1",
"@docusaurus/module-type-aliases": "3.1.0",
"@docusaurus/types": "3.1.0",
"@types/detect-port": "^1.3.3",
"@types/react-dom": "^18.2.7",
"@types/react-router-config": "^5.0.7",

View file

@ -0,0 +1,51 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import React, {type ReactNode, useContext} from 'react';
import type {BrokenLinks} from '@docusaurus/useBrokenLinks';
export type StatefulBrokenLinks = BrokenLinks & {
getCollectedLinks: () => string[];
getCollectedAnchors: () => string[];
};
export const createStatefulBrokenLinks = (): StatefulBrokenLinks => {
// Set to dedup, as it's not useful to collect the same value multiple times
const allAnchors = new Set<string>();
const allLinks = new Set<string>();
return {
collectAnchor: (anchor: string): void => {
allAnchors.add(anchor);
},
collectLink: (link: string): void => {
allLinks.add(link);
},
getCollectedAnchors: (): string[] => [...allAnchors],
getCollectedLinks: (): string[] => [...allLinks],
};
};
const Context = React.createContext<BrokenLinks>({
collectAnchor: () => {
// No-op for client
},
collectLink: () => {
// No-op for client
},
});
export const useBrokenLinksContext = (): BrokenLinks => useContext(Context);
export function BrokenLinksProvider({
children,
brokenLinks,
}: {
children: ReactNode;
brokenLinks: BrokenLinks;
}): JSX.Element {
return <Context.Provider value={brokenLinks}>{children}</Context.Provider>;
}

View file

@ -1,45 +0,0 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import React, {type ReactNode, useContext} from 'react';
type LinksCollector = {
collectLink: (link: string) => void;
};
type StatefulLinksCollector = LinksCollector & {
getCollectedLinks: () => string[];
};
export const createStatefulLinksCollector = (): StatefulLinksCollector => {
// Set to dedup, as it's not useful to collect multiple times the same link
const allLinks = new Set<string>();
return {
collectLink: (link: string): void => {
allLinks.add(link);
},
getCollectedLinks: (): string[] => [...allLinks],
};
};
const Context = React.createContext<LinksCollector>({
collectLink: () => {
// No-op for client. We only use the broken links checker server-side.
},
});
export const useLinksCollector = (): LinksCollector => useContext(Context);
export function LinksCollectorProvider({
children,
linksCollector,
}: {
children: ReactNode;
linksCollector: LinksCollector;
}): JSX.Element {
return <Context.Provider value={linksCollector}>{children}</Context.Provider>;
}

View file

@ -16,7 +16,7 @@ import {applyTrailingSlash} from '@docusaurus/utils-common';
import useDocusaurusContext from './useDocusaurusContext';
import isInternalUrl from './isInternalUrl';
import ExecutionEnvironment from './ExecutionEnvironment';
import {useLinksCollector} from '../LinksCollector';
import useBrokenLinks from './useBrokenLinks';
import {useBaseUrlUtils} from './useBaseUrl';
import type {Props} from '@docusaurus/Link';
@ -44,7 +44,7 @@ function Link(
siteConfig: {trailingSlash, baseUrl},
} = useDocusaurusContext();
const {withBaseUrl} = useBaseUrlUtils();
const linksCollector = useLinksCollector();
const brokenLinks = useBrokenLinks();
const innerRef = useRef<HTMLAnchorElement | null>(null);
useImperativeHandle(forwardedRef, () => innerRef.current!);
@ -144,7 +144,7 @@ function Link(
const isRegularHtmlLink = !targetLink || !isInternal || isAnchorLink;
if (!isRegularHtmlLink && !noBrokenLinkCheck) {
linksCollector.collectLink(targetLink!);
brokenLinks.collectLink(targetLink!);
}
return isRegularHtmlLink ? (

View file

@ -0,0 +1,13 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {useBrokenLinksContext} from '../BrokenLinksContext';
import type {BrokenLinks} from '@docusaurus/useBrokenLinks';
export default function useBrokenLinks(): BrokenLinks {
return useBrokenLinksContext();
}
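For illustration, a swizzled or custom theme component can use this hook to register the anchors it renders, so that links targeting them are not reported as broken at build time. A minimal sketch (the component itself is hypothetical; only the '@docusaurus/useBrokenLinks' import and the collectAnchor/collectLink methods come from this commit):

import React from 'react';
import useBrokenLinks from '@docusaurus/useBrokenLinks';

export default function VersionBadge({id}: {id: string}): JSX.Element {
  // Registered during the static render, then checked by handleBrokenLinks().
  useBrokenLinks().collectAnchor(id);
  return <span id={id}>v3.1</span>;
}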

View file

@ -20,9 +20,9 @@ import {renderStaticApp} from './serverRenderer';
import preload from './preload';
import App from './App';
import {
createStatefulLinksCollector,
LinksCollectorProvider,
} from './LinksCollector';
createStatefulBrokenLinks,
BrokenLinksProvider,
} from './BrokenLinksContext';
import type {Locals} from '@slorber/static-site-generator-webpack-plugin';
const getCompiledSSRTemplate = _.memoize((template: string) =>
@ -96,23 +96,27 @@ async function doRender(locals: Locals & {path: string}) {
const routerContext = {};
const helmetContext = {};
const linksCollector = createStatefulLinksCollector();
const statefulBrokenLinks = createStatefulBrokenLinks();
const app = (
// @ts-expect-error: we are migrating away from react-loadable anyways
<Loadable.Capture report={(moduleName) => modules.add(moduleName)}>
<HelmetProvider context={helmetContext}>
<StaticRouter location={location} context={routerContext}>
<LinksCollectorProvider linksCollector={linksCollector}>
<BrokenLinksProvider brokenLinks={statefulBrokenLinks}>
<App />
</LinksCollectorProvider>
</BrokenLinksProvider>
</StaticRouter>
</HelmetProvider>
</Loadable.Capture>
);
const appHtml = await renderStaticApp(app);
onLinksCollected(location, linksCollector.getCollectedLinks());
onLinksCollected({
staticPagePath: location,
anchors: statefulBrokenLinks.getCollectedAnchors(),
links: statefulBrokenLinks.getCollectedLinks(),
});
const {helmet} = helmetContext as FilledContext;
const htmlAttributes = helmet.htmlAttributes.toString();

View file

@ -152,8 +152,8 @@ async function buildLocale({
generatedFilesDir,
plugins,
siteConfig: {
baseUrl,
onBrokenLinks,
onBrokenAnchors,
staticDirectories: staticDirectoriesOption,
},
routes,
@ -180,13 +180,15 @@ async function buildLocale({
},
);
const allCollectedLinks: {[location: string]: string[]} = {};
const collectedLinks: {
[pathname: string]: {links: string[]; anchors: string[]};
} = {};
const headTags: {[location: string]: HelmetServerState} = {};
let serverConfig: Configuration = await createServerConfig({
props,
onLinksCollected: (staticPagePath, links) => {
allCollectedLinks[staticPagePath] = links;
onLinksCollected: ({staticPagePath, links, anchors}) => {
collectedLinks[staticPagePath] = {links, anchors};
},
onHeadTagsCollected: (staticPagePath, tags) => {
headTags[staticPagePath] = tags;
@ -288,11 +290,10 @@ async function buildLocale({
);
await handleBrokenLinks({
allCollectedLinks,
collectedLinks,
routes,
onBrokenLinks,
outDir,
baseUrl,
onBrokenAnchors,
});
logger.success`Generated static files in path=${path.relative(

View file

@ -42,7 +42,11 @@ declare module '@slorber/static-site-generator-webpack-plugin' {
headTags: string;
preBodyTags: string;
postBodyTags: string;
onLinksCollected: (staticPagePath: string, links: string[]) => void;
onLinksCollected: (params: {
staticPagePath: string;
links: string[];
anchors: string[];
}) => void;
onHeadTagsCollected: (
staticPagePath: string,
tags: HelmetServerState,

View file

@ -1,86 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`handleBrokenLinks reports all broken links 1`] = `
"Docusaurus found broken links!
Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist.
Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken links found:
- On source page path = /docs/good doc with space:
-> linking to ./some%20other%20non-existent%20doc1 (resolved as: /docs/some%20other%20non-existent%20doc1)
-> linking to ./break%2F..%2F..%2Fout2 (resolved as: /docs/break%2F..%2F..%2Fout2)
- On source page path = /docs/goodDoc:
-> linking to ../anotherGoodDoc#reported-because-of-bad-relative-path1 (resolved as: /anotherGoodDoc)
-> linking to ./docThatDoesNotExist2 (resolved as: /docs/docThatDoesNotExist2)
-> linking to ./badRelativeLink3 (resolved as: /docs/badRelativeLink3)
-> linking to ../badRelativeLink4 (resolved as: /badRelativeLink4)
- On source page path = /community:
-> linking to /someNonExistentDoc1
-> linking to /badLink2
-> linking to ./badLink3 (resolved as: /badLink3)
- On source page path = /page1:
-> linking to /link1
-> linking to /emptyFolder
- On source page path = /page2:
-> linking to /docs/link2
-> linking to /emptyFolder/
-> linking to /hey/link3
"
`;
exports[`handleBrokenLinks reports frequent broken links 1`] = `
"Docusaurus found broken links!
Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist.
Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass.
It looks like some of the broken links we found appear in many pages of your site.
Maybe those broken links appear on all pages through your site layout?
We recommend that you check your theme configuration for such links (particularly, theme navbar and footer).
Frequent broken links are linking to:
- /frequent
- ./maybe-not
Exhaustive list of all broken links found:
- On source page path = /docs/good doc with space:
-> linking to ./some%20other%20non-existent%20doc1 (resolved as: /docs/some%20other%20non-existent%20doc1)
-> linking to ./break%2F..%2F..%2Fout2 (resolved as: /docs/break%2F..%2F..%2Fout2)
-> linking to /frequent
-> linking to ./maybe-not (resolved as: /docs/maybe-not)
- On source page path = /docs/goodDoc:
-> linking to ../anotherGoodDoc#reported-because-of-bad-relative-path1 (resolved as: /anotherGoodDoc)
-> linking to ./docThatDoesNotExist2 (resolved as: /docs/docThatDoesNotExist2)
-> linking to ./badRelativeLink3 (resolved as: /docs/badRelativeLink3)
-> linking to ../badRelativeLink4 (resolved as: /badRelativeLink4)
-> linking to /frequent
-> linking to ./maybe-not (resolved as: /docs/maybe-not)
- On source page path = /community:
-> linking to /someNonExistentDoc1
-> linking to /badLink2
-> linking to ./badLink3 (resolved as: /badLink3)
-> linking to /frequent
-> linking to ./maybe-not (resolved as: /maybe-not)
- On source page path = /page1:
-> linking to /link1
-> linking to /emptyFolder
-> linking to /frequent
-> linking to ./maybe-not (resolved as: /maybe-not)
- On source page path = /page2:
-> linking to /docs/link2
-> linking to /emptyFolder/
-> linking to /hey/link3
-> linking to /frequent
-> linking to ./maybe-not (resolved as: /maybe-not)
"
`;

View file

@ -24,9 +24,12 @@ exports[`loadSiteConfig website with .cjs siteConfig 1`] = `
"headingIds": true,
},
"mermaid": false,
"parseFrontMatter": [Function],
"preprocessor": undefined,
"remarkRehypeOptions": undefined,
},
"noIndex": false,
"onBrokenAnchors": "warn",
"onBrokenLinks": "throw",
"onBrokenMarkdownLinks": "warn",
"onDuplicateRoutes": "warn",
@ -72,9 +75,12 @@ exports[`loadSiteConfig website with ts + js config 1`] = `
"headingIds": true,
},
"mermaid": false,
"parseFrontMatter": [Function],
"preprocessor": undefined,
"remarkRehypeOptions": undefined,
},
"noIndex": false,
"onBrokenAnchors": "warn",
"onBrokenLinks": "throw",
"onBrokenMarkdownLinks": "warn",
"onDuplicateRoutes": "warn",
@ -120,9 +126,12 @@ exports[`loadSiteConfig website with valid JS CJS config 1`] = `
"headingIds": true,
},
"mermaid": false,
"parseFrontMatter": [Function],
"preprocessor": undefined,
"remarkRehypeOptions": undefined,
},
"noIndex": false,
"onBrokenAnchors": "warn",
"onBrokenLinks": "throw",
"onBrokenMarkdownLinks": "warn",
"onDuplicateRoutes": "warn",
@ -168,9 +177,12 @@ exports[`loadSiteConfig website with valid JS ESM config 1`] = `
"headingIds": true,
},
"mermaid": false,
"parseFrontMatter": [Function],
"preprocessor": undefined,
"remarkRehypeOptions": undefined,
},
"noIndex": false,
"onBrokenAnchors": "warn",
"onBrokenLinks": "throw",
"onBrokenMarkdownLinks": "warn",
"onDuplicateRoutes": "warn",
@ -216,9 +228,12 @@ exports[`loadSiteConfig website with valid TypeScript CJS config 1`] = `
"headingIds": true,
},
"mermaid": false,
"parseFrontMatter": [Function],
"preprocessor": undefined,
"remarkRehypeOptions": undefined,
},
"noIndex": false,
"onBrokenAnchors": "warn",
"onBrokenLinks": "throw",
"onBrokenMarkdownLinks": "warn",
"onDuplicateRoutes": "warn",
@ -264,9 +279,12 @@ exports[`loadSiteConfig website with valid TypeScript ESM config 1`] = `
"headingIds": true,
},
"mermaid": false,
"parseFrontMatter": [Function],
"preprocessor": undefined,
"remarkRehypeOptions": undefined,
},
"noIndex": false,
"onBrokenAnchors": "warn",
"onBrokenLinks": "throw",
"onBrokenMarkdownLinks": "warn",
"onDuplicateRoutes": "warn",
@ -312,9 +330,12 @@ exports[`loadSiteConfig website with valid async config 1`] = `
"headingIds": true,
},
"mermaid": false,
"parseFrontMatter": [Function],
"preprocessor": undefined,
"remarkRehypeOptions": undefined,
},
"noIndex": false,
"onBrokenAnchors": "warn",
"onBrokenLinks": "throw",
"onBrokenMarkdownLinks": "warn",
"onDuplicateRoutes": "warn",
@ -362,9 +383,12 @@ exports[`loadSiteConfig website with valid async config creator function 1`] = `
"headingIds": true,
},
"mermaid": false,
"parseFrontMatter": [Function],
"preprocessor": undefined,
"remarkRehypeOptions": undefined,
},
"noIndex": false,
"onBrokenAnchors": "warn",
"onBrokenLinks": "throw",
"onBrokenMarkdownLinks": "warn",
"onDuplicateRoutes": "warn",
@ -412,9 +436,12 @@ exports[`loadSiteConfig website with valid config creator function 1`] = `
"headingIds": true,
},
"mermaid": false,
"parseFrontMatter": [Function],
"preprocessor": undefined,
"remarkRehypeOptions": undefined,
},
"noIndex": false,
"onBrokenAnchors": "warn",
"onBrokenLinks": "throw",
"onBrokenMarkdownLinks": "warn",
"onDuplicateRoutes": "warn",
@ -465,9 +492,12 @@ exports[`loadSiteConfig website with valid siteConfig 1`] = `
"headingIds": true,
},
"mermaid": false,
"parseFrontMatter": [Function],
"preprocessor": undefined,
"remarkRehypeOptions": undefined,
},
"noIndex": false,
"onBrokenAnchors": "warn",
"onBrokenLinks": "throw",
"onBrokenMarkdownLinks": "warn",
"onDuplicateRoutes": "warn",

View file

@ -98,9 +98,12 @@ exports[`load loads props for site with custom i18n path 1`] = `
"headingIds": true,
},
"mermaid": false,
"parseFrontMatter": [Function],
"preprocessor": undefined,
"remarkRehypeOptions": undefined,
},
"noIndex": false,
"onBrokenAnchors": "warn",
"onBrokenLinks": "throw",
"onBrokenMarkdownLinks": "warn",
"onDuplicateRoutes": "warn",

View file

@ -6,190 +6,608 @@
*/
import {jest} from '@jest/globals';
import path from 'path';
import _ from 'lodash';
import {handleBrokenLinks} from '../brokenLinks';
import type {RouteConfig} from '@docusaurus/types';
type Params = Parameters<typeof handleBrokenLinks>[0];
// We don't need all the routes attributes for our tests
type SimpleRoute = {path: string; routes?: SimpleRoute[]};
// Conveniently apply defaults to function under test
async function testBrokenLinks(params: {
collectedLinks?: Params['collectedLinks'];
onBrokenLinks?: Params['onBrokenLinks'];
onBrokenAnchors?: Params['onBrokenAnchors'];
routes?: SimpleRoute[];
}) {
await handleBrokenLinks({
collectedLinks: {},
onBrokenLinks: 'throw',
onBrokenAnchors: 'throw',
...params,
// Unsafe but convenient for tests
routes: (params.routes ?? []) as RouteConfig[],
});
}
describe('handleBrokenLinks', () => {
const routes: RouteConfig[] = [
{
path: '/community',
component: '',
it('accepts valid link', async () => {
await testBrokenLinks({
routes: [{path: '/page1'}, {path: '/page2'}],
collectedLinks: {
'/page1': {links: ['/page2'], anchors: []},
'/page2': {links: [], anchors: []},
},
{
path: '/docs',
component: '',
});
});
it('accepts valid link to uncollected page', async () => {
await testBrokenLinks({
routes: [{path: '/page1'}, {path: '/page2'}],
collectedLinks: {
'/page1': {links: ['/page2'], anchors: []},
// /page2 is absent on purpose: it doesn't contain any link/anchor
},
});
});
it('accepts valid link to nested route', async () => {
await testBrokenLinks({
routes: [
{path: '/docs/goodDoc', component: ''},
{path: '/docs/anotherGoodDoc', component: ''},
{path: '/docs/good doc with space', component: ''},
{path: '/docs/another good doc with space', component: ''},
{path: '/docs/weird%20but%20good', component: ''},
{path: '/page1'},
{path: '/nested/', routes: [{path: '/nested/page2'}]},
],
collectedLinks: {
'/page1': {links: ['/nested/page2'], anchors: []},
},
{
path: '*',
component: '',
});
});
it('accepts valid relative link', async () => {
await testBrokenLinks({
routes: [{path: '/dir/page1'}, {path: '/dir/page2'}],
collectedLinks: {
'/dir/page1': {
links: ['./page2', '../dir/page2', '/dir/page2'],
anchors: [],
},
},
});
});
it('accepts valid link with anchor', async () => {
await testBrokenLinks({
routes: [{path: '/page1'}, {path: '/page2'}],
collectedLinks: {
'/page1': {links: ['/page2#page2anchor'], anchors: []},
'/page2': {links: [], anchors: ['page2anchor']},
},
});
});
it('accepts valid link with querystring + anchor', async () => {
await testBrokenLinks({
routes: [{path: '/page1'}, {path: '/page2'}],
collectedLinks: {
'/page1': {
links: ['/page2?age=42&theme=dark#page2anchor'],
anchors: [],
},
'/page2': {links: [], anchors: ['page2anchor']},
},
});
});
it('accepts valid link to self', async () => {
await testBrokenLinks({
routes: [{path: '/page1'}],
collectedLinks: {
'/page1': {
links: [
'/page1',
'./page1',
'',
'/page1#anchor1',
'#anchor1',
'/page1?age=42#anchor1',
'?age=42#anchor1',
],
anchors: ['anchor1'],
},
},
});
});
it('accepts valid link with spaces and encoding', async () => {
await testBrokenLinks({
routes: [{path: '/page 1'}, {path: '/page 2'}],
collectedLinks: {
'/page 1': {
links: [
'/page 1',
'/page%201',
'/page%201?age=42',
'/page 2',
'/page%202',
'/page%202?age=42',
'/page%202?age=42#page2anchor',
],
anchors: [],
},
'/page 2': {links: [], anchors: ['page2anchor']},
},
});
});
it('rejects broken link', async () => {
await expect(() =>
testBrokenLinks({
routes: [{path: '/page1'}, {path: '/page2'}],
collectedLinks: {
'/page1': {links: ['/brokenLink'], anchors: []},
},
}),
).rejects.toThrowErrorMatchingInlineSnapshot(`
"Docusaurus found broken links!
Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist.
Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken links found:
- Broken link on source page path = /page1:
-> linking to /brokenLink
"
`);
});
it('rejects broken link with anchor', async () => {
await expect(() =>
testBrokenLinks({
routes: [{path: '/page1'}, {path: '/page2'}],
collectedLinks: {
'/page1': {links: ['/brokenLink#anchor'], anchors: []},
},
}),
).rejects.toThrowErrorMatchingInlineSnapshot(`
"Docusaurus found broken links!
Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist.
Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken links found:
- Broken link on source page path = /page1:
-> linking to /brokenLink#anchor
"
`);
});
it('rejects broken link with querystring + anchor', async () => {
await expect(() =>
testBrokenLinks({
routes: [{path: '/page1'}, {path: '/page2'}],
collectedLinks: {
'/page1': {links: ['/brokenLink?age=42#anchor'], anchors: []},
},
}),
).rejects.toThrowErrorMatchingInlineSnapshot(`
"Docusaurus found broken links!
Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist.
Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken links found:
- Broken link on source page path = /page1:
-> linking to /brokenLink?age=42#anchor
"
`);
});
it('rejects valid link with broken anchor', async () => {
await expect(() =>
testBrokenLinks({
routes: [{path: '/page1'}, {path: '/page2'}],
collectedLinks: {
'/page1': {links: ['/page2#brokenAnchor'], anchors: []},
'/page2': {links: [], anchors: []},
},
}),
).rejects.toThrowErrorMatchingInlineSnapshot(`
"Docusaurus found broken anchors!
Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist.
Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken anchors found:
- Broken anchor on source page path = /page1:
-> linking to /page2#brokenAnchor
"
`);
});
it('rejects valid link with empty broken anchor', async () => {
await expect(() =>
testBrokenLinks({
routes: [{path: '/page1'}, {path: '/page2'}],
collectedLinks: {
'/page1': {links: ['/page2#'], anchors: []},
'/page2': {links: [], anchors: []},
},
}),
).rejects.toThrowErrorMatchingInlineSnapshot(`
"Docusaurus found broken anchors!
Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist.
Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken anchors found:
- Broken anchor on source page path = /page1:
-> linking to /page2#
"
`);
});
it('rejects valid link with broken anchor + query-string', async () => {
await expect(() =>
testBrokenLinks({
routes: [{path: '/page1'}, {path: '/page2'}],
collectedLinks: {
'/page1': {
links: ['/page2?age=42&theme=dark#brokenAnchor'],
anchors: [],
},
'/page2': {links: [], anchors: []},
},
}),
).rejects.toThrowErrorMatchingInlineSnapshot(`
"Docusaurus found broken anchors!
Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist.
Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken anchors found:
- Broken anchor on source page path = /page1:
-> linking to /page2?age=42&theme=dark#brokenAnchor
"
`);
});
it('rejects valid link with broken anchor to self', async () => {
await expect(() =>
testBrokenLinks({
routes: [{path: '/page1'}],
collectedLinks: {
'/page1': {
links: [
'/page1',
'',
'#goodAnchor',
'/page1#goodAnchor',
'/page1?age=42#goodAnchor',
'#badAnchor1',
'/page1#badAnchor2',
'/page1?age=42#badAnchor3',
],
anchors: ['goodAnchor'],
},
},
}),
).rejects.toThrowErrorMatchingInlineSnapshot(`
"Docusaurus found broken anchors!
Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist.
Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken anchors found:
- Broken anchor on source page path = /page1:
-> linking to #badAnchor1 (resolved as: /page1#badAnchor1)
-> linking to /page1#badAnchor2
-> linking to /page1?age=42#badAnchor3
"
`);
});
it('rejects valid link with broken anchor to uncollected page', async () => {
await expect(() =>
testBrokenLinks({
routes: [{path: '/page1'}, {path: '/page2'}],
collectedLinks: {
'/page1': {links: ['/page2#brokenAnchor'], anchors: []},
// /page2 is absent on purpose: it doesn't contain any link/anchor
},
}),
).rejects.toThrowErrorMatchingInlineSnapshot(`
"Docusaurus found broken anchors!
Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist.
Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken anchors found:
- Broken anchor on source page path = /page1:
-> linking to /page2#brokenAnchor
"
`);
});
it('rejects broken anchor with query-string to uncollected page', async () => {
await expect(() =>
testBrokenLinks({
routes: [{path: '/page1'}, {path: '/page2'}],
collectedLinks: {
'/page1': {
links: ['/page2?age=42&theme=dark#brokenAnchor'],
anchors: [],
},
// /page2 is absent on purpose: it doesn't contain any link/anchor
},
}),
).rejects.toThrowErrorMatchingInlineSnapshot(`
"Docusaurus found broken anchors!
Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist.
Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken anchors found:
- Broken anchor on source page path = /page1:
-> linking to /page2?age=42&theme=dark#brokenAnchor
"
`);
});
it('can ignore broken links', async () => {
await testBrokenLinks({
onBrokenLinks: 'ignore',
routes: [{path: '/page1'}],
collectedLinks: {
'/page1': {
links: ['/page2'],
anchors: [],
},
},
});
});
it('can ignore broken anchors', async () => {
await testBrokenLinks({
onBrokenAnchors: 'ignore',
routes: [{path: '/page1'}],
collectedLinks: {
'/page1': {
links: ['/page1#brokenAnchor'],
anchors: [],
},
},
});
});
it('can ignore broken anchors but report broken link', async () => {
await expect(() =>
testBrokenLinks({
onBrokenAnchors: 'ignore',
routes: [{path: '/page1'}],
collectedLinks: {
'/page1': {
links: ['/page1#brokenAnchor', '/page2'],
anchors: [],
},
},
}),
).rejects.toThrowErrorMatchingInlineSnapshot(`
"Docusaurus found broken links!
Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist.
Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken links found:
- Broken link on source page path = /page1:
-> linking to /page2
"
`);
});
it('can ignore broken link but report broken anchors', async () => {
await expect(() =>
testBrokenLinks({
onBrokenLinks: 'ignore',
routes: [{path: '/page1'}],
collectedLinks: {
'/page1': {
links: [
'/page2',
'/page1#brokenAnchor1',
'/page1#brokenAnchor2',
'#brokenAnchor3',
],
anchors: [],
},
},
}),
).rejects.toThrowErrorMatchingInlineSnapshot(`
"Docusaurus found broken anchors!
Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist.
Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken anchors found:
- Broken anchor on source page path = /page1:
-> linking to /page1#brokenAnchor1
-> linking to /page1#brokenAnchor2
-> linking to #brokenAnchor3 (resolved as: /page1#brokenAnchor3)
"
`);
});
it('can warn for broken links', async () => {
const warnMock = jest.spyOn(console, 'warn');
await testBrokenLinks({
onBrokenLinks: 'warn',
routes: [{path: '/page1'}],
collectedLinks: {
'/page1': {
links: ['/page2'],
anchors: [],
},
},
});
expect(warnMock).toHaveBeenCalledTimes(1);
expect(warnMock.mock.calls).toMatchInlineSnapshot(`
[
[
"[WARNING] Docusaurus found broken links!
Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist.
Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken links found:
- Broken link on source page path = /page1:
-> linking to /page2
",
],
]
`);
warnMock.mockRestore();
});
it('can warn for broken anchors', async () => {
const warnMock = jest.spyOn(console, 'warn');
await testBrokenLinks({
onBrokenAnchors: 'warn',
routes: [{path: '/page1'}],
collectedLinks: {
'/page1': {
links: ['/page1#brokenAnchor'],
anchors: [],
},
},
});
expect(warnMock).toHaveBeenCalledTimes(1);
expect(warnMock.mock.calls).toMatchInlineSnapshot(`
[
[
"[WARNING] Docusaurus found broken anchors!
Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist.
Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken anchors found:
- Broken anchor on source page path = /page1:
-> linking to /page1#brokenAnchor
",
],
]
`);
warnMock.mockRestore();
});
it('can warn for both broken links and anchors', async () => {
const warnMock = jest.spyOn(console, 'warn');
await testBrokenLinks({
onBrokenLinks: 'warn',
onBrokenAnchors: 'warn',
routes: [{path: '/page1'}],
collectedLinks: {
'/page1': {
links: ['/page1#brokenAnchor', '/page2'],
anchors: [],
},
},
});
expect(warnMock).toHaveBeenCalledTimes(2);
expect(warnMock.mock.calls).toMatchInlineSnapshot(`
[
[
"[WARNING] Docusaurus found broken links!
Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist.
Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken links found:
- Broken link on source page path = /page1:
-> linking to /page2
",
],
[
"[WARNING] Docusaurus found broken anchors!
Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist.
Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken anchors found:
- Broken anchor on source page path = /page1:
-> linking to /page1#brokenAnchor
",
],
]
`);
warnMock.mockRestore();
});
it('reports frequent broken links differently', async () => {
const pagePaths = [
'/page1',
'/page2',
'/dir/page3',
'/dir/page4',
'/dir/page5',
];
    const routes: SimpleRoute[] = pagePaths.map((pagePath) => ({
      path: pagePath,
    }));
    const collectedLinks: Params['collectedLinks'] = Object.fromEntries(
      pagePaths.map((pagePath) => [
        pagePath,
        {
          links: ['/frequentBrokenLink', './relativeFrequentBrokenLink'],
          anchors: [],
        },
      ]),
    );
    await expect(() =>
      testBrokenLinks({
        routes,
        collectedLinks,
      }),
    ).rejects.toThrowErrorMatchingInlineSnapshot(`
"Docusaurus found broken links!
Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist.
Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass.
It looks like some of the broken links we found appear in many pages of your site.
Maybe those broken links appear on all pages through your site layout?
We recommend that you check your theme configuration for such links (particularly, theme navbar and footer).
Frequent broken links are linking to:
- /frequentBrokenLink
- ./relativeFrequentBrokenLink
Exhaustive list of all broken links found:
- Broken link on source page path = /page1:
-> linking to /frequentBrokenLink
-> linking to ./relativeFrequentBrokenLink (resolved as: /relativeFrequentBrokenLink)
- Broken link on source page path = /page2:
-> linking to /frequentBrokenLink
-> linking to ./relativeFrequentBrokenLink (resolved as: /relativeFrequentBrokenLink)
- Broken link on source page path = /dir/page3:
-> linking to /frequentBrokenLink
-> linking to ./relativeFrequentBrokenLink (resolved as: /dir/relativeFrequentBrokenLink)
- Broken link on source page path = /dir/page4:
-> linking to /frequentBrokenLink
-> linking to ./relativeFrequentBrokenLink (resolved as: /dir/relativeFrequentBrokenLink)
- Broken link on source page path = /dir/page5:
-> linking to /frequentBrokenLink
-> linking to ./relativeFrequentBrokenLink (resolved as: /dir/relativeFrequentBrokenLink)
"
`);
});
});

View file

@ -61,12 +61,17 @@ describe('normalizeConfig', () => {
markdown: {
format: 'md',
mermaid: true,
parseFrontMatter: async (params) =>
params.defaultParseFrontMatter(params),
preprocessor: ({fileContent}) => fileContent,
mdx1Compat: {
comments: true,
admonitions: false,
headingIds: true,
},
remarkRehypeOptions: {
footnoteLabel: 'Pied de page',
},
},
};
const normalizedConfig = normalizeConfig(userConfig);
@ -504,12 +509,19 @@ describe('markdown', () => {
const markdown: DocusaurusConfig['markdown'] = {
format: 'md',
mermaid: true,
parseFrontMatter: async (params) =>
params.defaultParseFrontMatter(params),
preprocessor: ({fileContent}) => fileContent,
mdx1Compat: {
comments: false,
admonitions: true,
headingIds: false,
},
remarkRehypeOptions: {
footnoteLabel: 'Notes de bas de page',
// @ts-expect-error: we don't validate it on purpose
anyKey: 'heck we accept it on purpose',
},
};
expect(
normalizeConfig({

View file

@ -5,45 +5,42 @@
* LICENSE file in the root directory of this source tree.
*/
import _ from 'lodash';
import logger from '@docusaurus/logger';
import {matchRoutes} from 'react-router-config';
import {parseURLPath, serializeURLPath, type URLPath} from '@docusaurus/utils';
import {getAllFinalRoutes} from './utils';
import type {RouteConfig, ReportingSeverity} from '@docusaurus/types';
type BrokenLink = {
link: string;
resolvedLink: string;
anchor: boolean;
};
type BrokenLinksMap = {[pathname: string]: BrokenLink[]};
// The linking data that has been collected on Docusaurus pages during SSG
// {rendered page pathname => links and anchors collected on that page}
type CollectedLinks = {
  [pathname: string]: {links: string[]; anchors: string[]};
};
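// Illustrative shape only (hypothetical pages, not taken from this commit):
//   const example: CollectedLinks = {
//     '/docs/intro': {links: ['/docs/setup#install', '/blog'], anchors: ['overview']},
//     '/blog': {links: [], anchors: []},
//   };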
function getBrokenLinksForPage({
collectedLinks,
pagePath,
pageLinks,
routes,
}: {
collectedLinks: CollectedLinks;
pagePath: string;
pageLinks: string[];
pageAnchors: string[];
routes: RouteConfig[];
}): BrokenLink[] {
// console.log('routes:', routes);
function isPathBrokenLink(linkPath: URLPath) {
const matchedRoutes = [linkPath.pathname, decodeURI(linkPath.pathname)]
// @ts-expect-error: React router types RouteConfig with an actual React
// component, but we load route components with string paths.
// We don't actually access component here, so it's fine.
@ -52,7 +49,52 @@ function getPageBrokenLinks({
return matchedRoutes.length === 0;
}
function isAnchorBrokenLink(linkPath: URLPath) {
const {pathname, hash} = linkPath;
// Link has no hash: it can't be a broken anchor link
if (hash === undefined) {
return false;
}
const targetPage =
collectedLinks[pathname] || collectedLinks[decodeURI(pathname)];
    // A link with an anchor to a page that does not exist (or that didn't collect
    // any links/anchors) is considered a broken anchor
if (!targetPage) {
return true;
}
// it's a broken anchor if the target page exists
// but the anchor does not exist on that page
return !targetPage.anchors.includes(hash);
}
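  // For example (hypothetical data): a link to '/docs/intro#overview' is reported
  // here when '/docs/intro' was never collected, or when its collected anchors
  // array does not contain 'overview'.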
const brokenLinks = pageLinks.flatMap((link) => {
const linkPath = parseURLPath(link, pagePath);
if (isPathBrokenLink(linkPath)) {
return [
{
link,
resolvedLink: serializeURLPath(linkPath),
anchor: false,
},
];
}
if (isAnchorBrokenLink(linkPath)) {
return [
{
link,
resolvedLink: serializeURLPath(linkPath),
anchor: true,
},
];
}
return [];
});
return brokenLinks;
}
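// Example result (hypothetical links): a page containing '/missing' and
// '/docs/intro#nope' (where the /docs/intro route exists but the anchor was not
// collected) would produce
//   [
//     {link: '/missing', resolvedLink: '/missing', anchor: false},
//     {link: '/docs/intro#nope', resolvedLink: '/docs/intro#nope', anchor: true},
//   ]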
/**
@ -66,27 +108,24 @@ function filterIntermediateRoutes(routesInput: RouteConfig[]): RouteConfig[] {
return getAllFinalRoutes(routesWithout404);
}
function getBrokenLinks({
  collectedLinks,
  routes,
}: {
  collectedLinks: CollectedLinks;
  routes: RouteConfig[];
}): BrokenLinksMap {
  const filteredRoutes = filterIntermediateRoutes(routes);
  return _.mapValues(collectedLinks, (pageCollectedData, pagePath) =>
    getBrokenLinksForPage({
      collectedLinks,
      pageLinks: pageCollectedData.links,
      pageAnchors: pageCollectedData.anchors,
      pagePath,
      routes: filteredRoutes,
    }),
  );
}
function brokenLinkMessage(brokenLink: BrokenLink): string {
@ -96,15 +135,49 @@ function getBrokenLinksErrorMessage(allBrokenLinks: {
}`;
}
function createBrokenLinksMessage(
  pagePath: string,
  brokenLinks: BrokenLink[],
): string {
  const type = brokenLinks[0]?.anchor === true ? 'anchor' : 'link';
  const anchorMessage =
    brokenLinks.length > 0
      ? `- Broken ${type} on source page path = ${pagePath}:
   -> linking to ${brokenLinks
          .map(brokenLinkMessage)
          .join('\n   -> linking to ')}`
      : '';
  return `${anchorMessage}`;
}
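// Renders one page section of the report, e.g. (illustrative values):
//   - Broken link on source page path = /page1:
//      -> linking to /page2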
function createBrokenAnchorsMessage(
brokenAnchors: BrokenLinksMap,
): string | undefined {
if (Object.keys(brokenAnchors).length === 0) {
return undefined;
}
return `Docusaurus found broken anchors!
Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist.
Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass.
Exhaustive list of all broken anchors found:
${Object.entries(brokenAnchors)
.map(([pagePath, brokenLinks]) =>
createBrokenLinksMessage(pagePath, brokenLinks),
)
.join('\n')}
`;
}
function createBrokenPathsMessage(
brokenPathsMap: BrokenLinksMap,
): string | undefined {
if (Object.keys(brokenPathsMap).length === 0) {
return undefined;
}
/**
@ -113,7 +186,7 @@ function getBrokenLinksErrorMessage(allBrokenLinks: {
* this out. See https://github.com/facebook/docusaurus/issues/3567#issuecomment-706973805
*/
function getLayoutBrokenLinksHelpMessage() {
    const flatList = Object.entries(brokenPathsMap).flatMap(
([pagePage, brokenLinks]) =>
brokenLinks.map((brokenLink) => ({pagePage, brokenLink})),
);
@ -146,102 +219,78 @@ Please check the pages of your site in the list below, and make sure you don't r
Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass.${getLayoutBrokenLinksHelpMessage()}
Exhaustive list of all broken links found:
${Object.entries(brokenPathsMap)
  .map(([pagePath, brokenPaths]) =>
    createBrokenLinksMessage(pagePath, brokenPaths),
)
.join('\n')}
`;
}
function splitBrokenLinks(brokenLinks: BrokenLinksMap): {
brokenPaths: BrokenLinksMap;
brokenAnchors: BrokenLinksMap;
} {
const brokenPaths: BrokenLinksMap = {};
const brokenAnchors: BrokenLinksMap = {};
Object.entries(brokenLinks).forEach(([pathname, pageBrokenLinks]) => {
const [anchorBrokenLinks, pathBrokenLinks] = _.partition(
pageBrokenLinks,
(link) => link.anchor,
);
if (pathBrokenLinks.length > 0) {
brokenPaths[pathname] = pathBrokenLinks;
}
if (anchorBrokenLinks.length > 0) {
brokenAnchors[pathname] = anchorBrokenLinks;
}
});
return {brokenPaths, brokenAnchors};
}
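// e.g. (hypothetical input) one bad path and one bad anchor on the same page split into:
//   brokenPaths:   {'/page1': [{link: '/missing', resolvedLink: '/missing', anchor: false}]}
//   brokenAnchors: {'/page1': [{link: '/docs#nope', resolvedLink: '/docs#nope', anchor: true}]}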
function reportBrokenLinks({
  brokenLinks,
  onBrokenLinks,
  onBrokenAnchors,
}: {
  brokenLinks: BrokenLinksMap;
  onBrokenLinks: ReportingSeverity;
  onBrokenAnchors: ReportingSeverity;
}) {
  // We need to split the broken links reporting in 2 for better granularity
  // This is because we need to report broken path/anchors independently
  // For v3.x retro-compatibility, we can't throw by default for broken anchors
  // TODO Docusaurus v4: make onBrokenAnchors throw by default?
  const {brokenPaths, brokenAnchors} = splitBrokenLinks(brokenLinks);
  const pathErrorMessage = createBrokenPathsMessage(brokenPaths);
  if (pathErrorMessage) {
    logger.report(onBrokenLinks)(pathErrorMessage);
  }
  const anchorErrorMessage = createBrokenAnchorsMessage(brokenAnchors);
  if (anchorErrorMessage) {
    logger.report(onBrokenAnchors)(anchorErrorMessage);
  }
}
export async function handleBrokenLinks({
  collectedLinks,
  onBrokenLinks,
  onBrokenAnchors,
  routes,
}: {
  collectedLinks: CollectedLinks;
  onBrokenLinks: ReportingSeverity;
  onBrokenAnchors: ReportingSeverity;
  routes: RouteConfig[];
}): Promise<void> {
  if (onBrokenLinks === 'ignore' && onBrokenAnchors === 'ignore') {
    return;
  }
  const brokenLinks = getBrokenLinks({routes, collectedLinks});
  reportBrokenLinks({brokenLinks, onBrokenLinks, onBrokenAnchors});
}
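// For reference, site authors tune both severities independently in their site
// config; sketch only, using the default severities shown further down in this diff:
//   module.exports = {
//     onBrokenLinks: 'throw',
//     onBrokenAnchors: 'warn',
//   };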

View file

@ -6,6 +6,7 @@
*/
import {
DEFAULT_PARSE_FRONT_MATTER,
DEFAULT_STATIC_DIR_NAME,
DEFAULT_I18N_DIR_NAME,
addLeadingSlash,
@ -13,7 +14,11 @@ import {
removeTrailingSlash,
} from '@docusaurus/utils';
import {Joi, printWarning} from '@docusaurus/utils-validation';
import type {
  DocusaurusConfig,
  I18nConfig,
  MarkdownConfig,
} from '@docusaurus/types';
const DEFAULT_I18N_LOCALE = 'en';
@ -24,10 +29,24 @@ export const DEFAULT_I18N_CONFIG: I18nConfig = {
localeConfigs: {},
};
export const DEFAULT_MARKDOWN_CONFIG: MarkdownConfig = {
format: 'mdx', // TODO change this to "detect" in Docusaurus v4?
mermaid: false,
preprocessor: undefined,
parseFrontMatter: DEFAULT_PARSE_FRONT_MATTER,
mdx1Compat: {
comments: true,
admonitions: true,
headingIds: true,
},
remarkRehypeOptions: undefined,
};
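// Sketch of a user-level override that these defaults back (illustrative values
// only; parseFrontMatter mirrors the pattern used in the config tests above):
//   markdown: {
//     format: 'detect',
//     mermaid: true,
//     parseFrontMatter: async (params) => params.defaultParseFrontMatter(params),
//     remarkRehypeOptions: {footnoteLabel: 'Footnotes'},
//   },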
export const DEFAULT_CONFIG: Pick<
DocusaurusConfig,
| 'i18n'
| 'onBrokenLinks'
| 'onBrokenAnchors'
| 'onBrokenMarkdownLinks'
| 'onDuplicateRoutes'
| 'plugins'
@ -48,6 +67,7 @@ export const DEFAULT_CONFIG: Pick<
> = {
i18n: DEFAULT_I18N_CONFIG,
onBrokenLinks: 'throw',
onBrokenAnchors: 'warn', // TODO Docusaurus v4: change to throw
onBrokenMarkdownLinks: 'warn',
onDuplicateRoutes: 'warn',
plugins: [],
@ -64,21 +84,11 @@ export const DEFAULT_CONFIG: Pick<
tagline: '',
baseUrlIssueBanner: true,
staticDirectories: [DEFAULT_STATIC_DIR_NAME],
  markdown: DEFAULT_MARKDOWN_CONFIG,
};
function createPluginSchema(theme: boolean) {
  return Joi.alternatives()
.try(
Joi.function(),
Joi.array()
@ -90,7 +100,6 @@ function createPluginSchema(theme: boolean) {
.length(2),
Joi.any().valid(false, null),
)
// @ts-expect-error: bad lib def, doesn't recognize an array of reports
.error((errors) => {
errors.forEach((error) => {
const validConfigExample = theme
@ -124,8 +133,7 @@ ${validConfigExample}
`;
});
return errors;
      });
}
const PluginSchema = createPluginSchema(false);
@ -202,6 +210,9 @@ export const ConfigSchema = Joi.object<DocusaurusConfig>({
onBrokenLinks: Joi.string()
.equal('ignore', 'log', 'warn', 'throw')
.default(DEFAULT_CONFIG.onBrokenLinks),
onBrokenAnchors: Joi.string()
.equal('ignore', 'log', 'warn', 'throw')
.default(DEFAULT_CONFIG.onBrokenAnchors),
onBrokenMarkdownLinks: Joi.string()
.equal('ignore', 'log', 'warn', 'throw')
.default(DEFAULT_CONFIG.onBrokenMarkdownLinks),
@ -280,6 +291,9 @@ export const ConfigSchema = Joi.object<DocusaurusConfig>({
format: Joi.string()
.equal('mdx', 'md', 'detect')
.default(DEFAULT_CONFIG.markdown.format),
parseFrontMatter: Joi.function().default(
() => DEFAULT_CONFIG.markdown.parseFrontMatter,
),
mermaid: Joi.boolean().default(DEFAULT_CONFIG.markdown.mermaid),
preprocessor: Joi.function()
.arity(1)
@ -296,6 +310,11 @@ export const ConfigSchema = Joi.object<DocusaurusConfig>({
DEFAULT_CONFIG.markdown.mdx1Compat.headingIds,
),
}).default(DEFAULT_CONFIG.markdown.mdx1Compat),
remarkRehypeOptions:
// add proper external options validation?
// Not sure if it's a good idea, validation is likely to become stale
// See https://github.com/remarkjs/remark-rehype#options
Joi.object().unknown(),
}).default(DEFAULT_CONFIG.markdown),
}).messages({
'docusaurus.configValidationWarning':

View file

@ -16,6 +16,7 @@ exports[`base webpack config creates webpack aliases 1`] = `
"@docusaurus/renderRoutes": "../../../../client/exports/renderRoutes.ts",
"@docusaurus/router": "../../../../client/exports/router.ts",
"@docusaurus/useBaseUrl": "../../../../client/exports/useBaseUrl.ts",
"@docusaurus/useBrokenLinks": "../../../../client/exports/useBrokenLinks.ts",
"@docusaurus/useDocusaurusContext": "../../../../client/exports/useDocusaurusContext.ts",
"@docusaurus/useGlobalData": "../../../../client/exports/useGlobalData.ts",
"@docusaurus/useIsBrowser": "../../../../client/exports/useIsBrowser.ts",

View file

@ -16,6 +16,7 @@ exports[`getDocusaurusAliases returns appropriate webpack aliases 1`] = `
"@docusaurus/renderRoutes": "<PROJECT_ROOT>/packages/docusaurus/src/client/exports/renderRoutes.ts",
"@docusaurus/router": "<PROJECT_ROOT>/packages/docusaurus/src/client/exports/router.ts",
"@docusaurus/useBaseUrl": "<PROJECT_ROOT>/packages/docusaurus/src/client/exports/useBaseUrl.ts",
"@docusaurus/useBrokenLinks": "<PROJECT_ROOT>/packages/docusaurus/src/client/exports/useBrokenLinks.ts",
"@docusaurus/useDocusaurusContext": "<PROJECT_ROOT>/packages/docusaurus/src/client/exports/useDocusaurusContext.ts",
"@docusaurus/useGlobalData": "<PROJECT_ROOT>/packages/docusaurus/src/client/exports/useGlobalData.ts",
"@docusaurus/useIsBrowser": "<PROJECT_ROOT>/packages/docusaurus/src/client/exports/useIsBrowser.ts",

View file

@ -1,6 +1,6 @@
{
"name": "@docusaurus/eslint-plugin",
"version": "3.0.1",
"version": "3.1.0",
"description": "ESLint plugin to enforce best Docusaurus practices.",
"main": "lib/index.js",
"keywords": [

View file

@ -1,6 +1,6 @@
{
"name": "@docusaurus/lqip-loader",
"version": "3.0.1",
"version": "3.1.0",
"description": "Low Quality Image Placeholders (LQIP) loader for webpack.",
"main": "lib/index.js",
"publishConfig": {
@ -17,7 +17,7 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.0.1",
"@docusaurus/logger": "3.1.0",
"file-loader": "^6.2.0",
"lodash": "^4.17.21",
"sharp": "^0.32.3",

View file

@ -1,6 +1,6 @@
{
"name": "stylelint-copyright",
"version": "3.0.1",
"version": "3.1.0",
"description": "Stylelint plugin to check CSS files for a copyright header.",
"main": "lib/index.js",
"license": "MIT",

Some files were not shown because too many files have changed in this diff