Mirror of https://github.com/facebook/docusaurus.git (synced 2025-05-24 06:27:02 +02:00)
feat(v2): allow specifying TOC max depth (themeConfig + frontMatter) (#5578)
* feat: add all TOC levels to MDX loader
* feat: add theme-level config for heading depth
* test: add remark MDX loader test
* fix: limit maxDepth validation to H2 - H6
* refactor: set default `maxDepth` using `joi`
* refactor: `maxDepth` -> `maxHeadingLevel`
* refactor: invert underlying TOC depth API
* refactor: make TOC algorithm level-aware
* feat: add support for per-doc TOC heading levels
* feat: support document-level heading levels for blog
* fix: correct validation for toc level frontmatter
* fix: ensure TOC doesn't generate redundant DOM
* perf: simpler TOC heading search alg
* docs: document heading level props for `TOCInline`
* Update website/docs/guides/markdown-features/markdown-features-inline-toc.mdx
  Co-authored-by: HonkingGoose <34918129+HonkingGoose@users.noreply.github.com>
* docs: fix docs (again)
* create dedicated test file for heading searching logic: exhaustive tests will be simpler to write
* toc search: add real-world test
* fix test
* add dogfooding tests for toc min/max
* add test for min/max toc frontmatter
* reverse min/max order
* add theme minHeadingLevel + tests
* simpler TOC rendering logic
* simplify TOC implementation (temp, WIP)
* reverse unnatural order for minHeadingLevel/maxHeadingLevel
* add TOC dogfooding tests to all content plugins
* expose toc min/max heading level frontmatter to all 3 content plugins
* refactor blogLayout: accept toc ReactElement directly
* move toc utils to theme-common
* add tests for filterTOC
* create new generic TOCItems component
* useless css file copied
* fix toc highlighting className conflicts
* update doc
* fix types

Co-authored-by: HonkingGoose <34918129+HonkingGoose@users.noreply.github.com>
Co-authored-by: slorber <lorber.sebastien@gmail.com>
Parent: caba1e4908
Commit: c86dfbda61
50 changed files with 1522 additions and 214 deletions
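Before the diff, a quick sketch of how the options introduced by this commit are meant to be used. The `minHeadingLevel`/`maxHeadingLevel` names come from the commit message above; the `tableOfContents` theme key and the `toc_min_heading_level`/`toc_max_heading_level` frontmatter keys are the names used in released Docusaurus docs and should be read as assumptions here, since the excerpt below only covers the MDX loader side:

```js
// docusaurus.config.js — hypothetical usage sketch of the theme-level option.
// The `tableOfContents` object name is an assumption; the min/max field names
// come from the commit message.
module.exports = {
  themeConfig: {
    tableOfContents: {
      minHeadingLevel: 2, // h1 is the page title and never appears in the TOC
      maxHeadingLevel: 4, // include headings down to h4
    },
  },
};
```

Per-document overrides are exposed as frontmatter on docs, blog posts, and pages (`toc_min_heading_level` / `toc_max_heading_level` in released versions), which is what the "expose toc min/max heading level frontmatter to all 3 content plugins" commit refers to.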
@@ -9,24 +9,29 @@ exports[`inline code should be escaped 1`] = `
       {
         value: '<code><Head>Test</Head></code>',
         id: 'headtesthead',
-        children: []
+        children: [],
+        level: 3
       }
-    ]
+    ],
+    level: 2
   },
   {
     value: '<code><div /></code>',
     id: 'div-',
-    children: []
+    children: [],
+    level: 2
   },
   {
     value: '<code><div> Test </div></code>',
     id: 'div-test-div',
-    children: []
+    children: [],
+    level: 2
   },
   {
     value: '<code><div><i>Test</i></div></code>',
     id: 'divitestidiv',
-    children: []
+    children: [],
+    level: 2
   }
 ];
 
@@ -51,24 +56,29 @@ exports[`non text phrasing content 1`] = `
       {
         value: '<strong>Importance</strong>',
         id: 'importance',
-        children: []
+        children: [],
+        level: 3
       }
-    ]
+    ],
+    level: 2
   },
   {
     value: '<del>Strikethrough</del>',
     id: 'strikethrough',
-    children: []
+    children: [],
+    level: 2
   },
   {
     value: '<i>HTML</i>',
     id: 'html',
-    children: []
+    children: [],
+    level: 2
   },
   {
     value: '<code>inline.code()</code>',
     id: 'inlinecode',
-    children: []
+    children: [],
+    level: 2
   }
 ];
 
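The snapshot updates above all follow the same pattern: every TOC entry gains a `level` field, and nested children carry their own levels. A minimal sketch of the resulting item shape (the real type is `TOCItem` from `@docusaurus/types`; the field names here are taken directly from the snapshots):

```ts
// Shape of one TOC entry as it appears in the updated snapshots.
interface TOCItemSketch {
  value: string; // rendered heading text (may contain inline HTML)
  id: string; // slug used for the heading anchor
  level: number; // heading level, 2–6 (h1 is treated as the page title)
  children: TOCItemSketch[]; // headings of deeper levels nested underneath
}

const example: TOCItemSketch = {
  value: '<code>inline.code()</code>',
  id: 'inlinecode',
  level: 2,
  children: [],
};
```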
@@ -12,7 +12,7 @@ import vfile from 'to-vfile';
 import plugin from '../index';
 import headings from '../../headings/index';
 
-const processFixture = async (name, options) => {
+const processFixture = async (name, options?) => {
   const path = join(__dirname, 'fixtures', `${name}.md`);
   const file = await vfile.read(path);
   const result = await remark()
@@ -41,7 +41,8 @@ test('text content', async () => {
       {
         value: 'Endi',
         id: 'endi',
-        children: []
+        children: [],
+        level: 3
       },
       {
         value: 'Endi',
@@ -50,14 +51,17 @@ test('text content', async () => {
       {
         value: 'Yangshun',
         id: 'yangshun',
-        children: []
+        children: [],
+        level: 3
       }
-    ]
+    ],
+    level: 2
   },
   {
     value: 'I ♥ unicode.',
     id: 'i--unicode',
-    children: []
+    children: [],
+    level: 2
   }
 ];
 
@@ -87,7 +91,8 @@ test('should export even with existing name', async () => {
   {
     value: 'Thanos',
     id: 'thanos',
-    children: []
+    children: [],
+    level: 2
   },
   {
     value: 'Tony Stark',
@@ -96,9 +101,11 @@ test('should export even with existing name', async () => {
       {
         value: 'Avengers',
         id: 'avengers',
-        children: []
+        children: [],
+        level: 3
       }
-    ]
+    ],
+    level: 2
   }
 ];
 
@@ -121,7 +128,8 @@ test('should export with custom name', async () => {
       {
         value: 'Endi',
         id: 'endi',
-        children: []
+        children: [],
+        level: 3
       },
       {
         value: 'Endi',
@@ -130,14 +138,17 @@ test('should export with custom name', async () => {
       {
         value: 'Yangshun',
        id: 'yangshun',
-        children: []
+        children: [],
+        level: 3
       }
-    ]
+    ],
+    level: 2
   },
   {
     value: 'I ♥ unicode.',
     id: 'i--unicode',
-    children: []
+    children: [],
+    level: 2
   }
 ];
 
@@ -171,7 +182,8 @@ test('should insert below imports', async () => {
   {
     value: 'Title',
     id: 'title',
-    children: []
+    children: [],
+    level: 2
   },
   {
     value: 'Test',
@@ -180,9 +192,11 @@ test('should insert below imports', async () => {
       {
         value: 'Again',
         id: 'again',
-        children: []
+        children: [],
+        level: 3
       }
-    ]
+    ],
+    level: 2
   }
 ];
 
@@ -0,0 +1,182 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import remark from 'remark';
+import mdx from 'remark-mdx';
+import search from '../search';
+import headings from '../../headings/index';
+
+const getHeadings = async (mdText: string) => {
+  const node = remark().parse(mdText);
+  const result = await remark().use(headings).use(mdx).run(node);
+  return search(result);
+};
+
+test('should process all heading levels', async () => {
+  const md = `
+# Alpha
+
+## Bravo
+
+### Charlie
+
+#### Delta
+
+##### Echo
+
+###### Foxtrot
+
+  `;
+
+  expect(await getHeadings(md)).toEqual([
+    {
+      children: [
+        {
+          children: [
+            {
+              children: [
+                {
+                  children: [
+                    {
+                      children: [],
+                      id: 'foxtrot',
+                      level: 6,
+                      value: 'Foxtrot',
+                    },
+                  ],
+                  id: 'echo',
+                  level: 5,
+                  value: 'Echo',
+                },
+              ],
+              id: 'delta',
+              level: 4,
+              value: 'Delta',
+            },
+          ],
+          id: 'charlie',
+          level: 3,
+          value: 'Charlie',
+        },
+      ],
+      id: 'bravo',
+      level: 2,
+      value: 'Bravo',
+    },
+  ]);
+});
+
+test('should process real-world well-formatted md', async () => {
+  const md = `
+# title
+
+some text
+
+## section 1
+
+some text
+
+### subsection 1-1
+
+some text
+
+#### subsection 1-1-1
+
+some text
+
+#### subsection 1-1-2
+
+some text
+
+### subsection 1-2
+
+some text
+
+### subsection 1-3
+
+some text
+
+## section 2
+
+some text
+
+### subsection 2-1
+
+some text
+
+### subsection 2-1
+
+some text
+
+## section 3
+
+some text
+
+### subsection 3-1
+
+some text
+
+### subsection 3-2
+
+some text
+
+  `;
+
+  expect(await getHeadings(md)).toEqual([
+    {
+      children: [
+        {
+          children: [
+            {
+              children: [],
+              id: 'subsection-1-1-1',
+              level: 4,
+              value: 'subsection 1-1-1',
+            },
+            {
+              children: [],
+              id: 'subsection-1-1-2',
+              level: 4,
+              value: 'subsection 1-1-2',
+            },
+          ],
+          id: 'subsection-1-1',
+          level: 3,
+          value: 'subsection 1-1',
+        },
+        {children: [], id: 'subsection-1-2', level: 3, value: 'subsection 1-2'},
+        {children: [], id: 'subsection-1-3', level: 3, value: 'subsection 1-3'},
+      ],
+      id: 'section-1',
+      level: 2,
+      value: 'section 1',
+    },
+    {
+      children: [
+        {children: [], id: 'subsection-2-1', level: 3, value: 'subsection 2-1'},
+        {
+          children: [],
+          id: 'subsection-2-1-1',
+          level: 3,
+          value: 'subsection 2-1',
+        },
+      ],
+      id: 'section-2',
+      level: 2,
+      value: 'section 2',
+    },
+    {
+      children: [
+        {children: [], id: 'subsection-3-1', level: 3, value: 'subsection 3-1'},
+        {children: [], id: 'subsection-3-2', level: 3, value: 'subsection 3-2'},
+      ],
+      id: 'section-3',
+      level: 2,
+      value: 'section 3',
+    },
+  ]);
+});
 
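One detail worth noting in the real-world test above: the second `### subsection 2-1` heading is expected to get the id `subsection-2-1-1`. That is the usual behavior of a stateful slugger that appends a numeric suffix on duplicates; a small illustration using `github-slugger` (whether the headings plugin uses this exact library is not shown in this diff):

```ts
import GithubSlugger from 'github-slugger';

// A stateful slugger de-duplicates repeated heading text by appending -1, -2, ...
const slugger = new GithubSlugger();
console.log(slugger.slug('subsection 2-1')); // "subsection-2-1"
console.log(slugger.slug('subsection 2-1')); // "subsection-2-1-1"
```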
@@ -8,40 +8,75 @@
 import toString from 'mdast-util-to-string';
 import visit, {Visitor} from 'unist-util-visit';
 import {toValue} from '../utils';
-import type {TOCItem as TOC} from '@docusaurus/types';
+import type {TOCItem} from '@docusaurus/types';
 import type {Node} from 'unist';
 import type {Heading} from 'mdast';
 
-// Visit all headings. We `slug` all headings (to account for
-// duplicates), but only take h2 and h3 headings.
-export default function search(node: Node): TOC[] {
-  const headings: TOC[] = [];
-  let current = -1;
-  let currentDepth = 0;
+// Intermediate interface for TOC algorithm
+interface SearchItem {
+  node: TOCItem;
+  level: number;
+  parentIndex: number;
+}
+
+/**
+ *
+ * Generate a TOC AST from the raw Markdown contents
+ */
+export default function search(node: Node): TOCItem[] {
+  const headings: SearchItem[] = [];
 
   const visitor: Visitor<Heading> = (child, _index, parent) => {
     const value = toString(child);
 
-    if (parent !== node || !value || child.depth > 3 || child.depth < 2) {
+    // depth:1 headings are titles and not included in the TOC
+    if (parent !== node || !value || child.depth < 2) {
       return;
     }
 
-    const entry: TOC = {
-      value: toValue(child),
-      id: child.data!.id as string,
-      children: [],
-    };
-
-    if (!headings.length || currentDepth >= child.depth) {
-      headings.push(entry);
-      current += 1;
-      currentDepth = child.depth;
-    } else {
-      headings[current].children.push(entry);
-    }
+    headings.push({
+      node: {
+        value: toValue(child),
+        id: child.data!.id as string,
+        children: [],
+        level: child.depth,
+      },
+      level: child.depth,
+      parentIndex: -1,
+    });
   };
 
   visit(node, 'heading', visitor);
 
-  return headings;
+  // Keep track of which previous index would be the current heading's direct parent.
+  // Each entry <i> is the last index of the `headings` array at heading level <i>.
+  // We will modify these indices as we iterate through all headings.
+  // e.g. if an ### H3 was last seen at index 2, then prevIndexForLevel[3] === 2
+  // indices 0 and 1 will remain unused.
+  const prevIndexForLevel = Array(7).fill(-1);
+
+  headings.forEach((curr, currIndex) => {
+    // take the last seen index for each ancestor level. the highest
+    // index will be the direct ancestor of the current heading.
+    const ancestorLevelIndexes = prevIndexForLevel.slice(2, curr.level);
+    curr.parentIndex = Math.max(...ancestorLevelIndexes);
+    // mark that curr.level was last seen at the current index
+    prevIndexForLevel[curr.level] = currIndex;
+  });
+
+  const rootNodeIndexes: number[] = [];
+
+  // For a given parentIndex, add each Node into that parent's `children` array
+  headings.forEach((heading, i) => {
+    if (heading.parentIndex >= 0) {
+      headings[heading.parentIndex].node.children.push(heading.node);
+    } else {
+      rootNodeIndexes.push(i);
+    }
+  });
+
+  const toc = headings
+    .filter((_, k) => rootNodeIndexes.includes(k)) // only return root nodes
+    .map((heading) => heading.node); // only return Node, no metadata
+  return toc;
 }
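The second half of the new `search` implementation is the interesting part: headings are first collected flat, each one is then linked to the most recently seen heading of a shallower level via `prevIndexForLevel`, and the flat list is finally folded into a tree. A condensed, standalone sketch of that idea (not the actual Docusaurus module; it folds the two passes into one, which works because a parent always precedes its children in document order):

```ts
// Sketch of the parent-linking technique used above, on simplified types.
type FlatHeading = {id: string; level: number};
type TreeHeading = {id: string; level: number; children: TreeHeading[]};

function buildToc(flat: FlatHeading[]): TreeHeading[] {
  const nodes: TreeHeading[] = flat.map((h) => ({...h, children: []}));
  // prevIndexForLevel[l] = index of the last heading seen at level l
  const prevIndexForLevel: number[] = Array(7).fill(-1);
  const roots: TreeHeading[] = [];

  nodes.forEach((node, i) => {
    // The direct parent is the most recently seen heading with a strictly
    // smaller level (levels 0 and 1 stay unused, as in the diff above).
    const parentIndex = Math.max(...prevIndexForLevel.slice(2, node.level));
    if (parentIndex >= 0) {
      nodes[parentIndex].children.push(node);
    } else {
      roots.push(node); // no shallower heading seen yet: top-level entry
    }
    prevIndexForLevel[node.level] = i;
  });

  return roots;
}

// Example: an h2/h3/h4/h3/h2 sequence nests as expected.
console.log(
  JSON.stringify(
    buildToc([
      {id: 'a', level: 2},
      {id: 'a-1', level: 3},
      {id: 'a-1-i', level: 4},
      {id: 'a-2', level: 3},
      {id: 'b', level: 2},
    ]),
    null,
    2,
  ),
);
```

Linking by index keeps the pass linear and tolerates skipped levels: an h4 appearing directly under an h2 still attaches to that h2, because the parent is simply the closest shallower heading seen so far.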