mirror of
https://github.com/facebook/docusaurus.git
synced 2025-07-16 00:06:11 +02:00
feat: doc tags (same as blog tags) (#3646)
* [v2] tags to doc, same as tags to blog - [IN PROGRESS] - Addition of plugin-content-docs - Addition of DocTagsListPage in `docusaurus-theme-classic` ! Error exists for this commit towards the theme aspect and help required. Commit towards #3434 * docs: make tags list page work * temp: disable onBrokenLinks * theme bootstrap: create DocTagsListPage * DocTagsPage added and functionality too - individual doc tag page added to show docs for that specific tag * Added all Docs Tags Link * add some shared tag utils * move tag tests to _dogfooding * fix type * fix some tests * fix blog test * refactor blog post tags handling * better yaml tag examples * better dogfood md files * refactor and factorize theme tag components * finish DocTagDocListPage * Extract DocItemFooter + add inline tag list * minor fix * better typings * fix versions.test.ts tests * add tests for doc tags * fix tests * test toTagDocListProp * move shared theme code to tagUtils * Add new theme translation keys * move common theme code to tagUtils + add tests * update-code-translations should handle theme-common * update french translation * revert add translation * fix pluralization problem in theme.docs.tagDocListPageTitle * add theme component configuration options * add more tags tests * add documentation for docs tagging Co-authored-by: slorber <lorber.sebastien@gmail.com>
This commit is contained in:
parent
f666de7e59
commit
f9c79cbd58
81 changed files with 1874 additions and 381 deletions
|
@ -12,7 +12,6 @@ import {
|
|||
genChunkName,
|
||||
idx,
|
||||
getSubFolder,
|
||||
normalizeUrl,
|
||||
posixPath,
|
||||
objectWithKeySorted,
|
||||
aliasedSitePath,
|
||||
|
@ -218,113 +217,6 @@ describe('load utils', () => {
|
|||
expect(getSubFolder(testE, 'docs')).toBeNull();
|
||||
});
|
||||
|
||||
test('normalizeUrl', () => {
|
||||
const asserts = [
|
||||
{
|
||||
input: ['/', ''],
|
||||
output: '/',
|
||||
},
|
||||
{
|
||||
input: ['', '/'],
|
||||
output: '/',
|
||||
},
|
||||
{
|
||||
input: ['/'],
|
||||
output: '/',
|
||||
},
|
||||
{
|
||||
input: [''],
|
||||
output: '',
|
||||
},
|
||||
{
|
||||
input: ['/', '/'],
|
||||
output: '/',
|
||||
},
|
||||
{
|
||||
input: ['/', 'docs'],
|
||||
output: '/docs',
|
||||
},
|
||||
{
|
||||
input: ['/', 'docs', 'en', 'next', 'blog'],
|
||||
output: '/docs/en/next/blog',
|
||||
},
|
||||
{
|
||||
input: ['/test/', '/docs', 'ro', 'doc1'],
|
||||
output: '/test/docs/ro/doc1',
|
||||
},
|
||||
{
|
||||
input: ['/test/', '/', 'ro', 'doc1'],
|
||||
output: '/test/ro/doc1',
|
||||
},
|
||||
{
|
||||
input: ['/', '/', '2020/02/29/leap-day'],
|
||||
output: '/2020/02/29/leap-day',
|
||||
},
|
||||
{
|
||||
input: ['', '/', 'ko', 'hello'],
|
||||
output: '/ko/hello',
|
||||
},
|
||||
{
|
||||
input: ['hello', 'world'],
|
||||
output: 'hello/world',
|
||||
},
|
||||
{
|
||||
input: ['http://www.google.com/', 'foo/bar', '?test=123'],
|
||||
output: 'http://www.google.com/foo/bar?test=123',
|
||||
},
|
||||
{
|
||||
input: ['http:', 'www.google.com///', 'foo/bar', '?test=123'],
|
||||
output: 'http://www.google.com/foo/bar?test=123',
|
||||
},
|
||||
{
|
||||
input: ['http://foobar.com', '', 'test'],
|
||||
output: 'http://foobar.com/test',
|
||||
},
|
||||
{
|
||||
input: ['http://foobar.com', '', 'test', '/'],
|
||||
output: 'http://foobar.com/test/',
|
||||
},
|
||||
{
|
||||
input: ['/', '', 'hello', '', '/', '/', '', '/', '/world'],
|
||||
output: '/hello/world',
|
||||
},
|
||||
{
|
||||
input: ['', '', '/tt', 'ko', 'hello'],
|
||||
output: '/tt/ko/hello',
|
||||
},
|
||||
{
|
||||
input: ['', '///hello///', '', '///world'],
|
||||
output: '/hello/world',
|
||||
},
|
||||
{
|
||||
input: ['', '/hello/', ''],
|
||||
output: '/hello/',
|
||||
},
|
||||
{
|
||||
input: ['', '/', ''],
|
||||
output: '/',
|
||||
},
|
||||
{
|
||||
input: ['///', '///'],
|
||||
output: '/',
|
||||
},
|
||||
{
|
||||
input: ['/', '/hello/world/', '///'],
|
||||
output: '/hello/world/',
|
||||
},
|
||||
];
|
||||
asserts.forEach((testCase) => {
|
||||
expect(normalizeUrl(testCase.input)).toBe(testCase.output);
|
||||
});
|
||||
|
||||
expect(() =>
|
||||
// @ts-expect-error undefined for test
|
||||
normalizeUrl(['http:example.com', undefined]),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"Url must be a string. Received undefined"`,
|
||||
);
|
||||
});
|
||||
|
||||
test('isValidPathname', () => {
|
||||
expect(isValidPathname('/')).toBe(true);
|
||||
expect(isValidPathname('/hey')).toBe(true);
|
||||
|
|
117
packages/docusaurus-utils/src/__tests__/normalizeUrl.test.ts
Normal file
117
packages/docusaurus-utils/src/__tests__/normalizeUrl.test.ts
Normal file
|
@ -0,0 +1,117 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import {normalizeUrl} from '../normalizeUrl';
|
||||
|
||||
// Table-driven spec for normalizeUrl: each `input` array of url parts
// must normalize to the single `output` string.
describe('normalizeUrl', () => {
  test('should normalize urls correctly', () => {
    const asserts = [
      {
        input: ['/', ''],
        output: '/',
      },
      {
        input: ['', '/'],
        output: '/',
      },
      {
        input: ['/'],
        output: '/',
      },
      {
        input: [''],
        output: '',
      },
      {
        input: ['/', '/'],
        output: '/',
      },
      {
        input: ['/', 'docs'],
        output: '/docs',
      },
      {
        input: ['/', 'docs', 'en', 'next', 'blog'],
        output: '/docs/en/next/blog',
      },
      {
        input: ['/test/', '/docs', 'ro', 'doc1'],
        output: '/test/docs/ro/doc1',
      },
      {
        input: ['/test/', '/', 'ro', 'doc1'],
        output: '/test/ro/doc1',
      },
      // Slashes inside a single part are preserved as-is.
      {
        input: ['/', '/', '2020/02/29/leap-day'],
        output: '/2020/02/29/leap-day',
      },
      {
        input: ['', '/', 'ko', 'hello'],
        output: '/ko/hello',
      },
      // No leading slash is invented when none of the parts has one.
      {
        input: ['hello', 'world'],
        output: 'hello/world',
      },
      // Query strings attach without a slash before the "?".
      {
        input: ['http://www.google.com/', 'foo/bar', '?test=123'],
        output: 'http://www.google.com/foo/bar?test=123',
      },
      // A bare protocol part is merged with the following host part.
      {
        input: ['http:', 'www.google.com///', 'foo/bar', '?test=123'],
        output: 'http://www.google.com/foo/bar?test=123',
      },
      {
        input: ['http://foobar.com', '', 'test'],
        output: 'http://foobar.com/test',
      },
      // A trailing '/' part keeps the trailing slash in the result.
      {
        input: ['http://foobar.com', '', 'test', '/'],
        output: 'http://foobar.com/test/',
      },
      // Empty and '/' parts collapse instead of producing '//' runs.
      {
        input: ['/', '', 'hello', '', '/', '/', '', '/', '/world'],
        output: '/hello/world',
      },
      {
        input: ['', '', '/tt', 'ko', 'hello'],
        output: '/tt/ko/hello',
      },
      {
        input: ['', '///hello///', '', '///world'],
        output: '/hello/world',
      },
      {
        input: ['', '/hello/', ''],
        output: '/hello/',
      },
      {
        input: ['', '/', ''],
        output: '/',
      },
      {
        input: ['///', '///'],
        output: '/',
      },
      {
        input: ['/', '/hello/world/', '///'],
        output: '/hello/world/',
      },
    ];
    asserts.forEach((testCase) => {
      expect(normalizeUrl(testCase.input)).toBe(testCase.output);
    });

    // Non-string parts are rejected with an explicit TypeError.
    expect(() =>
      // @ts-expect-error undefined for test
      normalizeUrl(['http:example.com', undefined]),
    ).toThrowErrorMatchingInlineSnapshot(
      `"Url must be a string. Received undefined"`,
    );
  });
});
|
183
packages/docusaurus-utils/src/__tests__/tags.test.ts
Normal file
183
packages/docusaurus-utils/src/__tests__/tags.test.ts
Normal file
|
@ -0,0 +1,183 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import {
|
||||
normalizeFrontMatterTag,
|
||||
normalizeFrontMatterTags,
|
||||
groupTaggedItems,
|
||||
Tag,
|
||||
} from '../tags';
|
||||
|
||||
describe('normalizeFrontMatterTag', () => {
|
||||
type Input = Parameters<typeof normalizeFrontMatterTag>[1];
|
||||
type Output = ReturnType<typeof normalizeFrontMatterTag>;
|
||||
|
||||
test('should normalize simple string tag', () => {
|
||||
const tagsPath = '/all/tags';
|
||||
const input: Input = 'tag';
|
||||
const expectedOutput: Output = {
|
||||
label: 'tag',
|
||||
permalink: `${tagsPath}/tag`,
|
||||
};
|
||||
expect(normalizeFrontMatterTag(tagsPath, input)).toEqual(expectedOutput);
|
||||
});
|
||||
|
||||
test('should normalize complex string tag', () => {
|
||||
const tagsPath = '/all/tags';
|
||||
const input: Input = 'some more Complex_tag';
|
||||
const expectedOutput: Output = {
|
||||
label: 'some more Complex_tag',
|
||||
permalink: `${tagsPath}/some-more-complex-tag`,
|
||||
};
|
||||
expect(normalizeFrontMatterTag(tagsPath, input)).toEqual(expectedOutput);
|
||||
});
|
||||
|
||||
test('should normalize simple object tag', () => {
|
||||
const tagsPath = '/all/tags';
|
||||
const input: Input = {label: 'tag', permalink: 'tagPermalink'};
|
||||
const expectedOutput: Output = {
|
||||
label: 'tag',
|
||||
permalink: `${tagsPath}/tagPermalink`,
|
||||
};
|
||||
expect(normalizeFrontMatterTag(tagsPath, input)).toEqual(expectedOutput);
|
||||
});
|
||||
|
||||
test('should normalize complex string tag', () => {
|
||||
const tagsPath = '/all/tags';
|
||||
const input: Input = {
|
||||
label: 'tag complex Label',
|
||||
permalink: '/MoreComplex/Permalink',
|
||||
};
|
||||
const expectedOutput: Output = {
|
||||
label: 'tag complex Label',
|
||||
permalink: `${tagsPath}/MoreComplex/Permalink`,
|
||||
};
|
||||
expect(normalizeFrontMatterTag(tagsPath, input)).toEqual(expectedOutput);
|
||||
});
|
||||
});
|
||||
|
||||
// Spec for normalizeFrontMatterTags: lists are normalized element-wise and
// deduplicated by resulting permalink (first occurrence wins, order kept).
describe('normalizeFrontMatterTags', () => {
  type Input = Parameters<typeof normalizeFrontMatterTags>[1];
  type Output = ReturnType<typeof normalizeFrontMatterTags>;

  test('should normalize string list', () => {
    const tagsPath = '/all/tags';
    // 'tag 1', 'tag-1' and 'tag1' all kebab-case to the same permalink.
    const input: Input = ['tag 1', 'tag-1', 'tag 3', 'tag1', 'tag-2'];
    // Keep user input order but remove tags that lead to same permalink
    const expectedOutput: Output = [
      {
        label: 'tag 1',
        permalink: `${tagsPath}/tag-1`,
      },
      {
        label: 'tag 3',
        permalink: `${tagsPath}/tag-3`,
      },
      {
        label: 'tag-2',
        permalink: `${tagsPath}/tag-2`,
      },
    ];
    expect(normalizeFrontMatterTags(tagsPath, input)).toEqual(expectedOutput);
  });

  test('should normalize complex mixed list', () => {
    const tagsPath = '/all/tags';
    // Mix of string shorthands and object tags, with permalink collisions.
    const input: Input = [
      'tag 1',
      {label: 'tag-1', permalink: '/tag-1'},
      'tag 3',
      'tag1',
      {label: 'tag 4', permalink: '/tag4Permalink'},
    ];
    // Keep user input order but remove tags that lead to same permalink
    const expectedOutput: Output = [
      {
        label: 'tag 1',
        permalink: `${tagsPath}/tag-1`,
      },
      {
        label: 'tag 3',
        permalink: `${tagsPath}/tag-3`,
      },
      {
        label: 'tag 4',
        permalink: `${tagsPath}/tag4Permalink`,
      },
    ];
    expect(normalizeFrontMatterTags(tagsPath, input)).toEqual(expectedOutput);
  });
});
|
||||
|
||||
// Spec for groupTaggedItems: items are grouped by tag *permalink*; labels are
// ignored for grouping and duplicate items within a group are removed.
describe('groupTaggedItems', () => {
  type SomeTaggedItem = {
    id: string;
    // Tags are nested to prove that the getter-based extraction works.
    nested: {
      tags: Tag[];
    };
  };
  function groupItems(items: SomeTaggedItem[]) {
    return groupTaggedItems(items, (item) => item.nested.tags);
  }

  type Input = Parameters<typeof groupItems>[0];
  type Output = ReturnType<typeof groupItems>;

  test('should group items by tag permalink', () => {
    const tagGuide = {label: 'Guide', permalink: '/guide'};
    const tagTutorial = {label: 'Tutorial', permalink: '/tutorial'};
    const tagAPI = {label: 'API', permalink: '/api'};

    // This one will be grouped under same permalink and label is ignored
    const tagTutorialOtherLabel = {
      label: 'TutorialOtherLabel',
      permalink: '/tutorial',
    };

    const item1: SomeTaggedItem = {
      id: '1',
      nested: {
        tags: [
          tagGuide,
          tagTutorial,
          tagAPI,
          // Add some duplicates on purpose: they should be filtered
          tagGuide,
          tagTutorialOtherLabel,
        ],
      },
    };
    const item2: SomeTaggedItem = {
      id: '2',
      nested: {
        tags: [tagAPI],
      },
    };
    const item3: SomeTaggedItem = {
      id: '3',
      nested: {
        tags: [tagTutorial],
      },
    };
    const item4: SomeTaggedItem = {
      id: '4',
      nested: {
        tags: [tagTutorialOtherLabel],
      },
    };

    const input: Input = [item1, item2, item3, item4];

    // Note: '/tutorial' keeps the *first* tag object seen (tagTutorial),
    // even though item4 carried tagTutorialOtherLabel.
    const expectedOutput: Output = {
      '/guide': {tag: tagGuide, items: [item1]},
      '/tutorial': {tag: tagTutorial, items: [item1, item3, item4]},
      '/api': {tag: tagAPI, items: [item1, item2]},
    };

    expect(groupItems(input)).toEqual(expectedOutput);
  });
});
|
|
@ -23,6 +23,10 @@ import resolvePathnameUnsafe from 'resolve-pathname';
|
|||
|
||||
import {posixPath as posixPathImport} from './posixPath';
|
||||
import {simpleHash, docuHash} from './hashUtils';
|
||||
import {normalizeUrl} from './normalizeUrl';
|
||||
|
||||
export * from './normalizeUrl';
|
||||
export * from './tags';
|
||||
|
||||
export const posixPath = posixPathImport;
|
||||
|
||||
|
@ -190,80 +194,6 @@ export function getSubFolder(file: string, refDir: string): string | null {
|
|||
return match && match[1];
|
||||
}
|
||||
|
||||
export function normalizeUrl(rawUrls: string[]): string {
|
||||
const urls = [...rawUrls];
|
||||
const resultArray = [];
|
||||
|
||||
let hasStartingSlash = false;
|
||||
let hasEndingSlash = false;
|
||||
|
||||
// If the first part is a plain protocol, we combine it with the next part.
|
||||
if (urls[0].match(/^[^/:]+:\/*$/) && urls.length > 1) {
|
||||
const first = urls.shift();
|
||||
urls[0] = first + urls[0];
|
||||
}
|
||||
|
||||
// There must be two or three slashes in the file protocol,
|
||||
// two slashes in anything else.
|
||||
const replacement = urls[0].match(/^file:\/\/\//) ? '$1:///' : '$1://';
|
||||
urls[0] = urls[0].replace(/^([^/:]+):\/*/, replacement);
|
||||
|
||||
// eslint-disable-next-line
|
||||
for (let i = 0; i < urls.length; i++) {
|
||||
let component = urls[i];
|
||||
|
||||
if (typeof component !== 'string') {
|
||||
throw new TypeError(`Url must be a string. Received ${typeof component}`);
|
||||
}
|
||||
|
||||
if (component === '') {
|
||||
if (i === urls.length - 1 && hasEndingSlash) {
|
||||
resultArray.push('/');
|
||||
}
|
||||
// eslint-disable-next-line
|
||||
continue;
|
||||
}
|
||||
|
||||
if (component !== '/') {
|
||||
if (i > 0) {
|
||||
// Removing the starting slashes for each component but the first.
|
||||
component = component.replace(
|
||||
/^[/]+/,
|
||||
// Special case where the first element of rawUrls is empty ["", "/hello"] => /hello
|
||||
component[0] === '/' && !hasStartingSlash ? '/' : '',
|
||||
);
|
||||
}
|
||||
|
||||
hasEndingSlash = component[component.length - 1] === '/';
|
||||
// Removing the ending slashes for each component but the last.
|
||||
// For the last component we will combine multiple slashes to a single one.
|
||||
component = component.replace(/[/]+$/, i < urls.length - 1 ? '' : '/');
|
||||
}
|
||||
|
||||
hasStartingSlash = true;
|
||||
resultArray.push(component);
|
||||
}
|
||||
|
||||
let str = resultArray.join('/');
|
||||
// Each input component is now separated by a single slash
|
||||
// except the possible first plain protocol part.
|
||||
|
||||
// Remove trailing slash before parameters or hash.
|
||||
str = str.replace(/\/(\?|&|#[^!])/g, '$1');
|
||||
|
||||
// Replace ? in parameters with &.
|
||||
const parts = str.split('?');
|
||||
str = parts.shift() + (parts.length > 0 ? '?' : '') + parts.join('&');
|
||||
|
||||
// Dedupe forward slashes in the entire path, avoiding protocol slashes.
|
||||
str = str.replace(/([^:]\/)\/+/g, '$1');
|
||||
|
||||
// Dedupe forward slashes at the beginning of the path.
|
||||
str = str.replace(/^\/+/g, '/');
|
||||
|
||||
return str;
|
||||
}
|
||||
|
||||
/**
|
||||
* Alias filepath relative to site directory, very useful so that we
|
||||
* don't expose user's site structure.
|
||||
|
|
80
packages/docusaurus-utils/src/normalizeUrl.ts
Normal file
80
packages/docusaurus-utils/src/normalizeUrl.ts
Normal file
|
@ -0,0 +1,80 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
export function normalizeUrl(rawUrls: string[]): string {
|
||||
const urls = [...rawUrls];
|
||||
const resultArray = [];
|
||||
|
||||
let hasStartingSlash = false;
|
||||
let hasEndingSlash = false;
|
||||
|
||||
// If the first part is a plain protocol, we combine it with the next part.
|
||||
if (urls[0].match(/^[^/:]+:\/*$/) && urls.length > 1) {
|
||||
const first = urls.shift();
|
||||
urls[0] = first + urls[0];
|
||||
}
|
||||
|
||||
// There must be two or three slashes in the file protocol,
|
||||
// two slashes in anything else.
|
||||
const replacement = urls[0].match(/^file:\/\/\//) ? '$1:///' : '$1://';
|
||||
urls[0] = urls[0].replace(/^([^/:]+):\/*/, replacement);
|
||||
|
||||
// eslint-disable-next-line
|
||||
for (let i = 0; i < urls.length; i++) {
|
||||
let component = urls[i];
|
||||
|
||||
if (typeof component !== 'string') {
|
||||
throw new TypeError(`Url must be a string. Received ${typeof component}`);
|
||||
}
|
||||
|
||||
if (component === '') {
|
||||
if (i === urls.length - 1 && hasEndingSlash) {
|
||||
resultArray.push('/');
|
||||
}
|
||||
// eslint-disable-next-line
|
||||
continue;
|
||||
}
|
||||
|
||||
if (component !== '/') {
|
||||
if (i > 0) {
|
||||
// Removing the starting slashes for each component but the first.
|
||||
component = component.replace(
|
||||
/^[/]+/,
|
||||
// Special case where the first element of rawUrls is empty ["", "/hello"] => /hello
|
||||
component[0] === '/' && !hasStartingSlash ? '/' : '',
|
||||
);
|
||||
}
|
||||
|
||||
hasEndingSlash = component[component.length - 1] === '/';
|
||||
// Removing the ending slashes for each component but the last.
|
||||
// For the last component we will combine multiple slashes to a single one.
|
||||
component = component.replace(/[/]+$/, i < urls.length - 1 ? '' : '/');
|
||||
}
|
||||
|
||||
hasStartingSlash = true;
|
||||
resultArray.push(component);
|
||||
}
|
||||
|
||||
let str = resultArray.join('/');
|
||||
// Each input component is now separated by a single slash
|
||||
// except the possible first plain protocol part.
|
||||
|
||||
// Remove trailing slash before parameters or hash.
|
||||
str = str.replace(/\/(\?|&|#[^!])/g, '$1');
|
||||
|
||||
// Replace ? in parameters with &.
|
||||
const parts = str.split('?');
|
||||
str = parts.shift() + (parts.length > 0 ? '?' : '') + parts.join('&');
|
||||
|
||||
// Dedupe forward slashes in the entire path, avoiding protocol slashes.
|
||||
str = str.replace(/([^:]\/)\/+/g, '$1');
|
||||
|
||||
// Dedupe forward slashes at the beginning of the path.
|
||||
str = str.replace(/^\/+/g, '/');
|
||||
|
||||
return str;
|
||||
}
|
100
packages/docusaurus-utils/src/tags.ts
Normal file
100
packages/docusaurus-utils/src/tags.ts
Normal file
|
@ -0,0 +1,100 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import {kebabCase, uniq, uniqBy} from 'lodash';
|
||||
import {normalizeUrl} from './normalizeUrl';
|
||||
|
||||
/** A doc/blog tag after normalization. */
export type Tag = {
  // Display label; user casing is preserved.
  label: string;
  // Url path of the tag's page (normalized under the tags path).
  permalink: string;
};

/**
 * A tag as authored in front matter: either the plain-string shorthand
 * or the full {label, permalink} object form.
 */
export type FrontMatterTag = string | Tag;
|
||||
|
||||
export function normalizeFrontMatterTag(
|
||||
tagsPath: string,
|
||||
frontMatterTag: FrontMatterTag,
|
||||
): Tag {
|
||||
function toTagObject(tagString: string): Tag {
|
||||
return {
|
||||
label: tagString,
|
||||
permalink: kebabCase(tagString),
|
||||
};
|
||||
}
|
||||
|
||||
// TODO maybe make ensure the permalink is valid url path?
|
||||
function normalizeTagPermalink(permalink: string): string {
|
||||
// note: we always apply tagsPath on purpose
|
||||
// for versioned docs, v1/doc.md and v2/doc.md tags with custom permalinks don't lead to the same created page
|
||||
// tagsPath is different for each doc version
|
||||
return normalizeUrl([tagsPath, permalink]);
|
||||
}
|
||||
|
||||
const tag: Tag =
|
||||
typeof frontMatterTag === 'string'
|
||||
? toTagObject(frontMatterTag)
|
||||
: frontMatterTag;
|
||||
|
||||
return {
|
||||
label: tag.label,
|
||||
permalink: normalizeTagPermalink(tag.permalink),
|
||||
};
|
||||
}
|
||||
|
||||
export function normalizeFrontMatterTags(
|
||||
tagsPath: string,
|
||||
frontMatterTags: FrontMatterTag[] | undefined,
|
||||
): Tag[] {
|
||||
const tags =
|
||||
frontMatterTags?.map((tag) => normalizeFrontMatterTag(tagsPath, tag)) ?? [];
|
||||
|
||||
return uniqBy(tags, (tag) => tag.permalink);
|
||||
}
|
||||
|
||||
/** A tag together with all the items (docs/blog posts) carrying it. */
export type TaggedItemGroup<Item> = {
  tag: Tag;
  items: Item[];
};
|
||||
|
||||
// Permits to group docs/blogPosts by tag (provided by FrontMatter)
|
||||
// Note: groups are indexed by permalink, because routes must be unique in the end
|
||||
// Labels may vary on 2 md files but they are normalized.
|
||||
// Docs with label='some label' and label='some-label' should end-up in the same group/page in the end
|
||||
// We can't create 2 routes /some-label because one would override the other
|
||||
export function groupTaggedItems<Item>(
|
||||
items: Item[],
|
||||
getItemTags: (item: Item) => Tag[],
|
||||
): Record<string, TaggedItemGroup<Item>> {
|
||||
const result: Record<string, TaggedItemGroup<Item>> = {};
|
||||
|
||||
function handleItemTag(item: Item, tag: Tag) {
|
||||
// Init missing tag groups
|
||||
// TODO: it's not really clear what should be the behavior if 2 items have the same tag but the permalink is different for each
|
||||
// For now, the first tag found wins
|
||||
result[tag.permalink] = result[tag.permalink] ?? {
|
||||
tag,
|
||||
items: [],
|
||||
};
|
||||
|
||||
// Add item to group
|
||||
result[tag.permalink].items.push(item);
|
||||
}
|
||||
|
||||
items.forEach((item) => {
|
||||
getItemTags(item).forEach((tag) => {
|
||||
handleItemTag(item, tag);
|
||||
});
|
||||
});
|
||||
|
||||
// If user add twice the same tag to a md doc (weird but possible),
|
||||
// we don't want the item to appear twice in the list...
|
||||
Object.values(result).forEach((group) => {
|
||||
group.items = uniq(group.items);
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue