test(utils, mdx-loader, core): improve coverage (#6303)

* test(utils, mdx-loader, core): improve coverage

* windows...

* fix
Joshua Chen 2022-01-10 15:00:51 +08:00 committed by GitHub
parent cf265c051e
commit a79c23bc45
38 changed files with 841 additions and 219 deletions

View file

@ -0,0 +1 @@
![img](./notFound.png)

View file

@ -2,7 +2,7 @@
![](./static/img.png)
![img](./static/img.png)
![img](static/img.png)
![img from second static folder](/img2.png)

View file

@ -2,6 +2,8 @@
exports[`transformImage plugin fail if image does not exist 1`] = `"Image packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/static/img/doesNotExist.png or packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/static2/img/doesNotExist.png used in packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/fail.md not found."`;
exports[`transformImage plugin fail if image relative path does not exist 1`] = `"Image packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/notFound.png used in packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/fail2.md not found."`;
exports[`transformImage plugin fail if image url is absent 1`] = `"Markdown image URL is mandatory in \\"packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/noUrl.md\\" file"`;
exports[`transformImage plugin pathname protocol 1`] = `

View file

@ -45,6 +45,11 @@ describe('transformImage plugin', () => {
processFixture('fail', {staticDirs}),
).rejects.toThrowErrorMatchingSnapshot();
});
test('fail if image relative path does not exist', async () => {
await expect(
processFixture('fail2', {staticDirs}),
).rejects.toThrowErrorMatchingSnapshot();
});
test('fail if image url is absent', async () => {
await expect(
processFixture('noUrl', {staticDirs}),

View file

@ -33,13 +33,9 @@ const createJSX = (node: Image, pathUrl: string) => {
(jsxNode as unknown as Literal).type = 'jsx';
(jsxNode as unknown as Literal).value = `<img ${
node.alt ? `alt={"${escapeHtml(node.alt)}"} ` : ''
}${
node.url
? `src={require("${inlineMarkdownImageFileLoader}${escapePath(
}${`src={require("${inlineMarkdownImageFileLoader}${escapePath(
pathUrl,
)}").default}`
: ''
}${node.title ? ` title="${escapeHtml(node.title)}"` : ''} />`;
)}").default}`}${node.title ? ` title="${escapeHtml(node.title)}"` : ''} />`;
if (jsxNode.url) {
delete (jsxNode as Partial<Image>).url;

View file

@ -8,6 +8,8 @@
[asset](asset.pdf 'Title')
[page](noUrl.md)
## Heading
```md

View file

@ -0,0 +1 @@
[nonexistent](@site/foo.pdf)

View file

@ -2,6 +2,8 @@
exports[`transformAsset plugin fail if asset url is absent 1`] = `"Markdown link URL is mandatory in \\"packages/docusaurus-mdx-loader/src/remark/transformLinks/__tests__/__fixtures__/noUrl.md\\" file (title: asset, line: 1)."`;
exports[`transformAsset plugin fail if asset with site alias does not exist 1`] = `"Asset packages/docusaurus-mdx-loader/src/remark/transformLinks/__tests__/__fixtures__/foo.pdf used in packages/docusaurus-mdx-loader/src/remark/transformLinks/__tests__/__fixtures__/nonexistentSiteAlias.md not found."`;
exports[`transformAsset plugin pathname protocol 1`] = `
"[asset](pathname:///asset/unchecked.pdf)
"
@ -18,6 +20,8 @@ exports[`transformAsset plugin transform md links to <a /> 1`] = `
<a target=\\"_blank\\" href={require('![CWD]/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[hash].[ext]!./asset.pdf').default} title=\\"Title\\">asset</a>
[page](noUrl.md)
## Heading
\`\`\`md

View file

@ -43,6 +43,12 @@ describe('transformAsset plugin', () => {
).rejects.toThrowErrorMatchingSnapshot();
});
test('fail if asset with site alias does not exist', async () => {
await expect(
processFixture('nonexistentSiteAlias'),
).rejects.toThrowErrorMatchingSnapshot();
});
test('transform md links to <a />', async () => {
const result = await processFixture('asset');
expect(result).toMatchSnapshot();

View file

@ -59,11 +59,11 @@ function toAssetRequireNode({
path.relative(path.dirname(filePath), requireAssetPath),
);
const hash = hashRegex.test(node.url)
? node.url.substr(node.url.indexOf('#'))
? node.url.substring(node.url.indexOf('#'))
: '';
// nodejs does not like require("assets/file.pdf")
relativeRequireAssetPath = relativeRequireAssetPath.startsWith('.')
// require("assets/file.pdf") means requiring from a package called assets
relativeRequireAssetPath = relativeRequireAssetPath.startsWith('./')
? relativeRequireAssetPath
: `./${relativeRequireAssetPath}`;
@ -90,7 +90,7 @@ async function convertToAssetLinkIfNeeded(
const hasSiteAlias = assetPath.startsWith('@site/');
const hasAssetLikeExtension =
path.extname(assetPath) && !assetPath.match(/#|.md|.mdx|.html/);
path.extname(assetPath) && !assetPath.match(/#|\.md$|\.mdx$|\.html$/);
const looksLikeAssetLink = hasSiteAlias || hasAssetLikeExtension;
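A quick note on the regex change above: the old pattern left the dots unescaped and unanchored, so any URL merely containing an `md`/`mdx`/`html`-like substring was excluded from asset handling. An illustration with a hypothetical file name (both patterns are taken verbatim from the hunk):

```ts
const oldPattern = /#|.md|.mdx|.html/;
const newPattern = /#|\.md$|\.mdx$|\.html$/;

// "slides.mdx.pdf" has a ".pdf" extension, so it should be treated as an asset.
console.log(oldPattern.test('slides.mdx.pdf')); // true  -> wrongly excluded from asset handling before
console.log(newPattern.test('slides.mdx.pdf')); // false -> now recognized as an asset-like extension
```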

View file

@ -13,9 +13,9 @@ import {
describe('createToExtensionsRedirects', () => {
test('should reject empty extensions', () => {
expect(() => {
createToExtensionsRedirects(['/'], ['.html']);
createToExtensionsRedirects(['/'], ['']);
}).toThrowErrorMatchingInlineSnapshot(`
"Extension \\".html\\" contains a \\".\\" (dot) which is not allowed.
"Extension \\"\\" is not allowed.
If the redirect extension system is not good enough for your usecase, you can create redirects yourself with the \\"createRedirects\\" plugin option."
`);
});

View file

@ -16,7 +16,7 @@ import type {
BlogTags,
} from './types';
import {
parseMarkdownFile,
parseMarkdownString,
normalizeUrl,
aliasedSitePath,
getEditUrl,
@ -104,13 +104,22 @@ function formatBlogPostDate(locale: string, date: Date): string {
}
async function parseBlogPostMarkdownFile(blogSourceAbsolute: string) {
const result = await parseMarkdownFile(blogSourceAbsolute, {
const markdownString = await fs.readFile(blogSourceAbsolute, 'utf-8');
try {
const result = parseMarkdownString(markdownString, {
removeContentTitle: true,
});
return {
...result,
frontMatter: validateBlogPostFrontMatter(result.frontMatter),
};
} catch (e) {
throw new Error(
`Error while parsing blog post file ${blogSourceAbsolute}: "${
(e as Error).message
}".`,
);
}
}
const defaultReadingTime: ReadingTimeFunction = ({content, options}) =>

View file

@ -8,13 +8,13 @@
import path from 'path';
import fs from 'fs';
import {defaultConfig, compile} from 'eta';
import {normalizeUrl, getSwizzledComponent} from '@docusaurus/utils';
import {normalizeUrl} from '@docusaurus/utils';
import {readDefaultCodeTranslationMessages} from '@docusaurus/theme-translations';
import logger from '@docusaurus/logger';
import openSearchTemplate from './templates/opensearch';
import {memoize} from 'lodash';
import type {DocusaurusContext, Plugin} from '@docusaurus/types';
import type {LoadContext, Plugin} from '@docusaurus/types';
const getCompiledOpenSearchTemplate = memoize(() =>
compile(openSearchTemplate.trim()),
@ -31,26 +31,16 @@ function renderOpenSearchTemplate(data: {
const OPEN_SEARCH_FILENAME = 'opensearch.xml';
export default function theme(
context: DocusaurusContext & {baseUrl: string},
): Plugin<void> {
export default function themeSearchAlgolia(context: LoadContext): Plugin<void> {
const {
baseUrl,
siteConfig: {title, url, favicon},
i18n: {currentLocale},
} = context;
const pageComponent = './theme/SearchPage/index.js';
const pagePath =
getSwizzledComponent(pageComponent) ||
path.resolve(__dirname, pageComponent);
return {
name: 'docusaurus-theme-search-algolia',
getPathsToWatch() {
return [pagePath];
},
getThemePath() {
return path.resolve(__dirname, './theme');
},
@ -69,7 +59,7 @@ export default function theme(
async contentLoaded({actions: {addRoute}}) {
addRoute({
path: normalizeUrl([baseUrl, 'search']),
component: pagePath,
component: '@theme/SearchPage',
exact: true,
});
},
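Design note on the hunk above: pointing the route at the `@theme/SearchPage` alias lets Docusaurus' standard theme-alias resolution pick up a user's swizzled copy, which is why the manual `getSwizzledComponent` lookup and the `getPathsToWatch` entry could be dropped. A swizzle still works through the conventional override location; a minimal, hypothetical override:

```ts
// src/theme/SearchPage/index.tsx — resolved via the @theme/SearchPage alias ahead of the theme's copy.
import React from 'react';

export default function SearchPage(): JSX.Element {
  return React.createElement('main', null, 'My custom search page');
}
```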

View file

@ -8,6 +8,10 @@ exports[`validation schemas AdmonitionsSchema: for value=null 1`] = `"\\"value\\
exports[`validation schemas AdmonitionsSchema: for value=true 1`] = `"\\"value\\" must be of type object"`;
exports[`validation schemas PathnameSchema: for value="foo" 1`] = `"\\"value\\" is not a valid pathname. Pathname should start with slash and not contain any domain or query string."`;
exports[`validation schemas PathnameSchema: for value="https://github.com/foo" 1`] = `"\\"value\\" is not a valid pathname. Pathname should start with slash and not contain any domain or query string."`;
exports[`validation schemas PluginIdSchema: for value="/docs" 1`] = `"\\"value\\" with value \\"/docs\\" fails to match the required pattern: /^[a-zA-Z_-]+$/"`;
exports[`validation schemas PluginIdSchema: for value="do cs" 1`] = `"\\"value\\" with value \\"do cs\\" fails to match the required pattern: /^[a-zA-Z_-]+$/"`;

View file

@ -13,6 +13,7 @@ import {
RemarkPluginsSchema,
PluginIdSchema,
URISchema,
PathnameSchema,
} from '../validationSchemas';
function createTestHelpers({
@ -128,4 +129,12 @@ describe('validation schemas', () => {
testOK(protocolRelativeUrl1);
testOK(protocolRelativeUrl2);
});
test('PathnameSchema', () => {
const {testFail, testOK} = createTestHelpers({schema: PathnameSchema});
testOK('/foo');
testFail('foo');
testFail('https://github.com/foo');
});
});

View file

@ -36,6 +36,17 @@ describe('validateFrontMatter', () => {
);
});
test('should not convert simple values', () => {
const schema = Joi.object({
test: JoiFrontMatter.string(),
});
const frontMatter = {
test: 'foo',
tags: ['foo', 'bar'],
};
expect(validateFrontMatter(frontMatter, schema)).toEqual(frontMatter);
});
// Fix Yaml trying to convert strings to numbers automatically
// We only want to deal with a single type in the final frontmatter (not string | number)
test('should convert number values to string when string schema', () => {

View file

@ -34,12 +34,9 @@ export const URISchema = Joi.alternatives(
// This custom validation logic is required notably because Joi does not accept paths like /a/b/c ...
Joi.custom((val, helpers) => {
try {
const url = new URL(val);
if (url) {
// eslint-disable-next-line no-new
new URL(val);
return val;
} else {
return helpers.error('any.invalid');
}
} catch {
return helpers.error('any.invalid');
}
@ -53,9 +50,8 @@ export const PathnameSchema = Joi.string()
.custom((val) => {
if (!isValidPathname(val)) {
throw new Error();
} else {
return val;
}
return val;
})
.message(
'{{#label}} is not a valid pathname. Pathname should start with slash and not contain any domain or query string.',
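A usage sketch for the simplified `PathnameSchema` above (the relative import path is illustrative, as in the tests; the expected message is the one captured in the snapshot earlier in this diff):

```ts
import {PathnameSchema} from './validationSchemas';

// '/foo' passes; anything not starting with a slash, or carrying a domain, fails.
console.log(PathnameSchema.validate('/foo').error); // undefined
console.log(PathnameSchema.validate('https://github.com/foo').error?.message);
// '"value" is not a valid pathname. Pathname should start with slash and not contain any domain or query string.'
```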

View file

@ -141,6 +141,10 @@ describe('getDataFileData', () => {
);
}
test('returns undefined for nonexistent file', async () => {
await expect(readDataFile('nonexistent.yml')).resolves.toBeUndefined();
});
test('read valid yml author file', async () => {
await expect(readDataFile('valid.yml')).resolves.toEqual({a: 1});
});

View file

@ -19,9 +19,17 @@ import {
mapAsyncSequential,
findAsyncSequential,
updateTranslationFileMessages,
parseMarkdownHeadingId,
encodePath,
addTrailingPathSeparator,
resolvePathname,
getPluginI18nPath,
generate,
reportMessage,
posixPath,
} from '../index';
import {sum} from 'lodash';
import fs from 'fs-extra';
import path from 'path';
describe('load utils', () => {
test('fileToPath', () => {
@ -40,6 +48,12 @@ describe('load utils', () => {
});
});
test('encodePath', () => {
expect(encodePath('a/foo/')).toEqual('a/foo/');
expect(encodePath('a/<foo>/')).toEqual('a/%3Cfoo%3E/');
expect(encodePath('a/你好/')).toEqual('a/%E4%BD%A0%E5%A5%BD/');
});
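The new `encodePath` assertions above correspond to per-segment URI encoding; a minimal sketch consistent with them (not necessarily the exact implementation):

```ts
// Encode each path segment while keeping the slashes intact.
export function encodePath(userPath: string): string {
  return userPath
    .split('/')
    .map((segment) => encodeURIComponent(segment))
    .join('/');
}

// encodePath('a/<foo>/') -> 'a/%3Cfoo%3E/'
// encodePath('a/你好/')   -> 'a/%E4%BD%A0%E5%A5%BD/'
```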
test('genChunkName', () => {
const firstAssert: Record<string, string> = {
'/docs/adding-blog': 'docs-adding-blog-062',
@ -84,6 +98,28 @@ describe('load utils', () => {
expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091');
});
test('addTrailingPathSeparator', () => {
expect(addTrailingPathSeparator('foo')).toEqual(
process.platform === 'win32' ? 'foo\\' : 'foo/',
);
expect(addTrailingPathSeparator('foo/')).toEqual(
process.platform === 'win32' ? 'foo\\' : 'foo/',
);
});
test('resolvePathname', () => {
// These tests are directly copied from https://github.com/mjackson/resolve-pathname/blob/master/modules/__tests__/resolvePathname-test.js
// Maybe we want to wrap that logic in the future?
expect(resolvePathname('c')).toEqual('c');
expect(resolvePathname('c', 'a/b')).toEqual('a/c');
expect(resolvePathname('/c', '/a/b')).toEqual('/c');
expect(resolvePathname('', '/a/b')).toEqual('/a/b');
expect(resolvePathname('../c', '/a/b')).toEqual('/c');
expect(resolvePathname('c', '/a/b')).toEqual('/a/c');
expect(resolvePathname('c', '/a/')).toEqual('/a/c');
expect(resolvePathname('..', '/a/b')).toEqual('/');
});
test('isValidPathname', () => {
expect(isValidPathname('/')).toBe(true);
expect(isValidPathname('/hey')).toBe(true);
@ -93,12 +129,48 @@ describe('load utils', () => {
expect(isValidPathname('/hey///ho///')).toBe(true); // Unexpected but valid
expect(isValidPathname('/hey/héllô you')).toBe(true);
//
expect(isValidPathname('')).toBe(false);
expect(isValidPathname('hey')).toBe(false);
expect(isValidPathname('/hey?qs=ho')).toBe(false);
expect(isValidPathname('https://fb.com/hey')).toBe(false);
expect(isValidPathname('//hey')).toBe(false);
expect(isValidPathname('////')).toBe(false);
});
});
describe('generate', () => {
test('behaves correctly', async () => {
const writeMock = jest.spyOn(fs, 'writeFile').mockImplementation(() => {});
const existsMock = jest.spyOn(fs, 'existsSync');
const readMock = jest.spyOn(fs, 'readFile');
// First call: no file, no cache
existsMock.mockImplementationOnce(() => false);
await generate(__dirname, 'foo', 'bar');
expect(writeMock).toHaveBeenNthCalledWith(
1,
path.join(__dirname, 'foo'),
'bar',
);
// Second call: cache exists
await generate(__dirname, 'foo', 'bar');
expect(writeMock).toBeCalledTimes(1);
// Generate another: file exists, cache doesn't
existsMock.mockImplementationOnce(() => true);
// @ts-expect-error: seems the typedef doesn't understand overload
readMock.mockImplementationOnce(() => Promise.resolve('bar'));
await generate(__dirname, 'baz', 'bar');
expect(writeMock).toBeCalledTimes(1);
// Generate again: force skip cache
await generate(__dirname, 'foo', 'bar', true);
expect(writeMock).toHaveBeenNthCalledWith(
2,
path.join(__dirname, 'foo'),
'bar',
);
});
});
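For readers following the mocks above, here is a plausible sketch of the caching contract that `generate` is being tested against (assumptions: md5 content hashes, an in-memory map keyed by file name, and a boolean `skipCache` parameter; the real implementation may differ in details):

```ts
import fs from 'fs-extra';
import path from 'path';
import {createHash} from 'crypto';

const fileHash = new Map<string, string>();
const md5 = (str: string) => createHash('md5').update(str).digest('hex');

export async function generate(
  generatedFilesDir: string,
  file: string,
  content: string,
  skipCache: boolean = false, // assumption: callers can force a write, as the test does
): Promise<void> {
  const filepath = path.join(generatedFilesDir, file);

  if (skipCache) {
    await fs.writeFile(filepath, content);
    fileHash.set(file, md5(content));
    return;
  }

  let lastHash = fileHash.get(file);

  // Cold in-memory cache but the file already exists on disk: hash the on-disk copy instead.
  if (!lastHash && fs.existsSync(filepath)) {
    const lastContent = await fs.readFile(filepath, 'utf8');
    lastHash = md5(lastContent);
    fileHash.set(file, lastHash);
  }

  const currentHash = md5(content);
  if (lastHash !== currentHash) {
    await fs.writeFile(filepath, content);
    fileHash.set(file, currentHash);
  }
}
```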
@ -257,7 +329,7 @@ describe('mapAsyncSequential', () => {
});
});
describe('findAsyncSequencial', () => {
describe('findAsyncSequential', () => {
function sleep(timeout: number): Promise<void> {
return new Promise((resolve) => {
setTimeout(resolve, timeout);
@ -311,50 +383,76 @@ describe('updateTranslationFileMessages', () => {
});
});
describe('parseMarkdownHeadingId', () => {
test('can parse simple heading without id', () => {
expect(parseMarkdownHeadingId('## Some heading')).toEqual({
text: '## Some heading',
id: undefined,
});
});
test('can parse simple heading with id', () => {
expect(parseMarkdownHeadingId('## Some heading {#custom-_id}')).toEqual({
text: '## Some heading',
id: 'custom-_id',
});
});
test('can parse heading not ending with the id', () => {
expect(parseMarkdownHeadingId('## {#custom-_id} Some heading')).toEqual({
text: '## {#custom-_id} Some heading',
id: undefined,
});
});
test('can parse heading with multiple id', () => {
expect(parseMarkdownHeadingId('## Some heading {#id1} {#id2}')).toEqual({
text: '## Some heading {#id1}',
id: 'id2',
});
});
test('can parse heading with link and id', () => {
describe('getPluginI18nPath', () => {
test('gets correct path', () => {
expect(
parseMarkdownHeadingId(
'## Some heading [facebook](https://facebook.com) {#id}',
posixPath(
getPluginI18nPath({
siteDir: __dirname,
locale: 'zh-Hans',
pluginName: 'plugin-content-docs',
pluginId: 'community',
subPaths: ['foo'],
}).replace(__dirname, ''),
),
).toEqual({
text: '## Some heading [facebook](https://facebook.com)',
id: 'id',
).toEqual('/i18n/zh-Hans/plugin-content-docs-community/foo');
});
test('gets correct path for default plugin', () => {
expect(
posixPath(
getPluginI18nPath({
siteDir: __dirname,
locale: 'zh-Hans',
pluginName: 'plugin-content-docs',
subPaths: ['foo'],
}).replace(__dirname, ''),
),
).toEqual('/i18n/zh-Hans/plugin-content-docs/foo');
});
test('gets correct path when no subpaths', () => {
expect(
posixPath(
getPluginI18nPath({
siteDir: __dirname,
locale: 'zh-Hans',
pluginName: 'plugin-content-docs',
}).replace(__dirname, ''),
),
).toEqual('/i18n/zh-Hans/plugin-content-docs');
});
});
test('can parse heading with only id', () => {
expect(parseMarkdownHeadingId('## {#id}')).toEqual({
text: '##',
id: 'id',
});
describe('reportMessage', () => {
test('all severities', () => {
const consoleLog = jest.spyOn(console, 'info').mockImplementation(() => {});
const consoleWarn = jest
.spyOn(console, 'warn')
.mockImplementation(() => {});
const consoleError = jest
.spyOn(console, 'error')
.mockImplementation(() => {});
reportMessage('hey', 'ignore');
reportMessage('hey', 'log');
reportMessage('hey', 'warn');
reportMessage('hey', 'error');
expect(() =>
reportMessage('hey', 'throw'),
).toThrowErrorMatchingInlineSnapshot(`"hey"`);
expect(() =>
// @ts-expect-error: for test
reportMessage('hey', 'foo'),
).toThrowErrorMatchingInlineSnapshot(
`"Unexpected \\"reportingSeverity\\" value: foo."`,
);
expect(consoleLog).toBeCalledTimes(1);
expect(consoleLog).toBeCalledWith(expect.stringMatching(/.*\[INFO].* hey/));
expect(consoleWarn).toBeCalledTimes(1);
expect(consoleWarn).toBeCalledWith(
expect.stringMatching(/.*\[WARNING].* hey/),
);
expect(consoleError).toBeCalledTimes(1);
expect(consoleError).toBeCalledWith(
expect.stringMatching(/.*\[ERROR].* hey/),
);
});
});
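The severities exercised above pin down `reportMessage`'s contract; a minimal sketch consistent with the test (assumption: it delegates to `@docusaurus/logger`, whose output carries the `[INFO]`/`[WARNING]`/`[ERROR]` prefixes the spies match on):

```ts
import logger from '@docusaurus/logger';
import type {ReportingSeverity} from '@docusaurus/types';

export function reportMessage(
  message: string,
  reportingSeverity: ReportingSeverity,
): void {
  switch (reportingSeverity) {
    case 'ignore':
      break;
    case 'log':
      logger.info(message);
      break;
    case 'warn':
      logger.warn(message);
      break;
    case 'error':
      logger.error(message);
      break;
    case 'throw':
      throw new Error(message);
    default:
      throw new Error(
        `Unexpected "reportingSeverity" value: ${reportingSeverity}.`,
      );
  }
}
```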

View file

@ -0,0 +1,266 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {replaceMarkdownLinks} from '../markdownLinks';
describe('replaceMarkdownLinks', () => {
test('basic replace', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',
filePath: 'docs/intro.md',
contentPaths: {
contentPath: 'docs',
contentPathLocalized: 'i18n/docs-localized',
},
sourceToPermalink: {
'@site/docs/intro.md': '/docs/intro',
'@site/docs/foo.md': '/doc/foo',
'@site/docs/bar/baz.md': '/doc/baz',
},
fileString: `
[foo](./foo.md)
[baz](./bar/baz.md)
[foo](foo.md)
[http](http://github.com/facebook/docusaurus/README.md)
[https](https://github.com/facebook/docusaurus/README.md)
[asset](./foo.js)
[nonexistent](hmmm.md)
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [
Object {
"contentPaths": Object {
"contentPath": "docs",
"contentPathLocalized": "i18n/docs-localized",
},
"filePath": "docs/intro.md",
"link": "hmmm.md",
},
],
"newContent": "
[foo](/doc/foo)
[baz](/doc/baz)
[foo](/doc/foo)
[http](http://github.com/facebook/docusaurus/README.md)
[https](https://github.com/facebook/docusaurus/README.md)
[asset](./foo.js)
[nonexistent](hmmm.md)
",
}
`);
});
// TODO bad
test('links in HTML comments', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',
filePath: 'docs/intro.md',
contentPaths: {
contentPath: 'docs',
contentPathLocalized: 'i18n/docs-localized',
},
sourceToPermalink: {
'@site/docs/intro.md': '/docs/intro',
},
fileString: `
<!-- [foo](./foo.md) -->
<!--
[foo](./foo.md)
-->
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [
Object {
"contentPaths": Object {
"contentPath": "docs",
"contentPathLocalized": "i18n/docs-localized",
},
"filePath": "docs/intro.md",
"link": "./foo.md",
},
Object {
"contentPaths": Object {
"contentPath": "docs",
"contentPathLocalized": "i18n/docs-localized",
},
"filePath": "docs/intro.md",
"link": "./foo.md",
},
],
"newContent": "
<!-- [foo](./foo.md) -->
<!--
[foo](./foo.md)
-->
",
}
`);
});
test('links in fenced blocks', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',
filePath: 'docs/intro.md',
contentPaths: {
contentPath: 'docs',
contentPathLocalized: 'i18n/docs-localized',
},
sourceToPermalink: {
'@site/docs/intro.md': '/docs/intro',
},
fileString: `
\`\`\`
[foo](foo.md)
\`\`\`
\`\`\`\`js
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`\`
\`\`\`\`js
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`\`
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [],
"newContent": "
\`\`\`
[foo](foo.md)
\`\`\`
\`\`\`\`js
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`\`
\`\`\`\`js
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`\`
",
}
`);
});
// TODO bad
test('links in inline code', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',
filePath: 'docs/intro.md',
contentPaths: {
contentPath: 'docs',
contentPathLocalized: 'i18n/docs-localized',
},
sourceToPermalink: {
'@site/docs/intro.md': '/docs/intro',
},
fileString: `
\`[foo](foo.md)\`
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [
Object {
"contentPaths": Object {
"contentPath": "docs",
"contentPathLocalized": "i18n/docs-localized",
},
"filePath": "docs/intro.md",
"link": "foo.md",
},
],
"newContent": "
\`[foo](foo.md)\`
",
}
`);
});
// TODO bad
test('links with same title as URL', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',
filePath: 'docs/intro.md',
contentPaths: {
contentPath: 'docs',
contentPathLocalized: 'i18n/docs-localized',
},
sourceToPermalink: {
'@site/docs/intro.md': '/docs/intro',
'@site/docs/foo.md': '/docs/foo',
},
fileString: `
[foo.md](foo.md)
[./foo.md](./foo.md)
[foo.md](./foo.md)
[./foo.md](foo.md)
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [],
"newContent": "
[/docs/foo](foo.md)
[/docs/foo](./foo.md)
[foo.md](/docs/foo)
[.//docs/foo](foo.md)
",
}
`);
});
test('multiple links on same line', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',
filePath: 'docs/intro.md',
contentPaths: {
contentPath: 'docs',
contentPathLocalized: 'i18n/docs-localized',
},
sourceToPermalink: {
'@site/docs/intro.md': '/docs/intro',
'@site/docs/a.md': '/docs/a',
'@site/docs/b.md': '/docs/b',
'@site/docs/c.md': '/docs/c',
},
fileString: `
[a](a.md), [a](a.md), [b](b.md), [c](c.md)
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [],
"newContent": "
[a](/docs/a), [a](/docs/a), [b](/docs/b), [c](/docs/c)
",
}
`);
});
});

View file

@ -9,6 +9,7 @@ import {
createExcerpt,
parseMarkdownContentTitle,
parseMarkdownString,
parseMarkdownHeadingId,
} from '../markdownParser';
import dedent from 'dedent';
@ -827,4 +828,141 @@ describe('parseMarkdownString', () => {
}
`);
});
test('should handle code blocks', () => {
expect(
parseMarkdownString(dedent`
\`\`\`js
code
\`\`\`
Content
`),
).toMatchInlineSnapshot(`
Object {
"content": "\`\`\`js
code
\`\`\`
Content",
"contentTitle": undefined,
"excerpt": "Content",
"frontMatter": Object {},
}
`);
expect(
parseMarkdownString(dedent`
\`\`\`\`js
Foo
\`\`\`diff
code
\`\`\`
Bar
\`\`\`\`
Content
`),
).toMatchInlineSnapshot(`
Object {
"content": "\`\`\`\`js
Foo
\`\`\`diff
code
\`\`\`
Bar
\`\`\`\`
Content",
"contentTitle": undefined,
"excerpt": "Content",
"frontMatter": Object {},
}
`);
expect(
parseMarkdownString(dedent`
\`\`\`\`js
Foo
\`\`\`diff
code
\`\`\`\`
Content
`),
).toMatchInlineSnapshot(`
Object {
"content": "\`\`\`\`js
Foo
\`\`\`diff
code
\`\`\`\`
Content",
"contentTitle": undefined,
"excerpt": "Content",
"frontMatter": Object {},
}
`);
});
test('throws for invalid front matter', () => {
expect(() =>
parseMarkdownString(dedent`
---
foo: f: a
---
`),
).toThrowErrorMatchingInlineSnapshot(`
"incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line at line 2, column 7:
foo: f: a
^"
`);
});
});
describe('parseMarkdownHeadingId', () => {
test('can parse simple heading without id', () => {
expect(parseMarkdownHeadingId('## Some heading')).toEqual({
text: '## Some heading',
id: undefined,
});
});
test('can parse simple heading with id', () => {
expect(parseMarkdownHeadingId('## Some heading {#custom-_id}')).toEqual({
text: '## Some heading',
id: 'custom-_id',
});
});
test('can parse heading not ending with the id', () => {
expect(parseMarkdownHeadingId('## {#custom-_id} Some heading')).toEqual({
text: '## {#custom-_id} Some heading',
id: undefined,
});
});
test('can parse heading with multiple id', () => {
expect(parseMarkdownHeadingId('## Some heading {#id1} {#id2}')).toEqual({
text: '## Some heading {#id1}',
id: 'id2',
});
});
test('can parse heading with link and id', () => {
expect(
parseMarkdownHeadingId(
'## Some heading [facebook](https://facebook.com) {#id}',
),
).toEqual({
text: '## Some heading [facebook](https://facebook.com)',
id: 'id',
});
});
test('can parse heading with only id', () => {
expect(parseMarkdownHeadingId('## {#id}')).toEqual({
text: '##',
id: 'id',
});
});
});

View file

@ -11,11 +11,13 @@ import {
escapePath,
posixPath,
aliasedSitePath,
toMessageRelativeFilePath,
} from '../pathUtils';
import path from 'path';
describe('isNameTooLong', () => {
test('behaves correctly', () => {
const asserts: Record<string, boolean> = {
const asserts = {
'': false,
'foo-bar-096': false,
'foo-bar-1df': false,
@ -27,16 +29,36 @@ describe('isNameTooLong', () => {
true,
'foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-test-1-test-2-787':
true,
// Every Hanzi is three bytes
:
{apfs: false, xfs: true},
};
Object.keys(asserts).forEach((path) => {
expect(isNameTooLong(path)).toBe(asserts[path]);
const oldProcessPlatform = process.platform;
Object.defineProperty(process, 'platform', {value: 'darwin'});
Object.keys(asserts).forEach((file) => {
expect(isNameTooLong(file)).toBe(
typeof asserts[file] === 'boolean' ? asserts[file] : asserts[file].apfs,
);
});
Object.defineProperty(process, 'platform', {value: 'win32'});
Object.keys(asserts).forEach((file) => {
expect(isNameTooLong(file)).toBe(
typeof asserts[file] === 'boolean' ? asserts[file] : asserts[file].apfs,
);
});
Object.defineProperty(process, 'platform', {value: 'android'});
Object.keys(asserts).forEach((file) => {
expect(isNameTooLong(file)).toBe(
typeof asserts[file] === 'boolean' ? asserts[file] : asserts[file].xfs,
);
});
Object.defineProperty(process, 'platform', {value: oldProcessPlatform});
});
});
describe('shortName', () => {
test('works', () => {
const asserts: Record<string, string> = {
const asserts = {
'': '',
'foo-bar': 'foo-bar',
'endi-lie': 'endi-lie',
@ -45,10 +67,33 @@ describe('shortName', () => {
'foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-',
'foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-test-1-test-2':
'foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-foo-bar-test-1-test-',
:
{
apfs: '字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字',
// This is pretty bad (a character clipped in half), but I doubt if it ever happens
xfs: '字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字字<E5AD97>',
},
};
const oldProcessPlatform = process.platform;
Object.defineProperty(process, 'platform', {value: 'darwin'});
Object.keys(asserts).forEach((file) => {
expect(shortName(file)).toBe(asserts[file]);
expect(shortName(file)).toBe(
typeof asserts[file] === 'string' ? asserts[file] : asserts[file].apfs,
);
});
Object.defineProperty(process, 'platform', {value: 'win32'});
Object.keys(asserts).forEach((file) => {
expect(shortName(file)).toBe(
typeof asserts[file] === 'string' ? asserts[file] : asserts[file].apfs,
);
});
Object.defineProperty(process, 'platform', {value: 'android'});
Object.keys(asserts).forEach((file) => {
expect(shortName(file)).toBe(
typeof asserts[file] === 'string' ? asserts[file] : asserts[file].xfs,
);
});
Object.defineProperty(process, 'platform', {value: oldProcessPlatform});
});
// Based on https://github.com/gatsbyjs/gatsby/pull/21518/files
@ -70,6 +115,17 @@ describe('shortName', () => {
});
});
describe('toMessageRelativeFilePath', () => {
test('behaves correctly', () => {
jest
.spyOn(process, 'cwd')
.mockImplementationOnce(() => path.join(__dirname, '..'));
expect(
toMessageRelativeFilePath(path.join(__dirname, 'foo/bar.js')),
).toEqual('__tests__/foo/bar.js');
});
});
describe('escapePath', () => {
test('escapePath works', () => {
const asserts: Record<string, string> = {

View file

@ -85,6 +85,10 @@ describe('normalizeFrontMatterTags', () => {
expect(normalizeFrontMatterTags(tagsPath, input)).toEqual(expectedOutput);
});
test('succeeds for empty list', () => {
expect(normalizeFrontMatterTags('/foo')).toEqual([]);
});
test('should normalize complex mixed list', () => {
const tagsPath = '/all/tags';
const input: Input = [

View file

@ -5,7 +5,7 @@
* LICENSE file in the root directory of this source tree.
*/
import {normalizeUrl} from '../urlUtils';
import {normalizeUrl, getEditUrl} from '../urlUtils';
describe('normalizeUrl', () => {
test('should normalize urls correctly', () => {
@ -102,6 +102,22 @@ describe('normalizeUrl', () => {
input: ['/', '/hello/world/', '///'],
output: '/hello/world/',
},
{
input: ['file://', '//hello/world/'],
output: 'file:///hello/world/',
},
{
input: ['file:', '/hello/world/'],
output: 'file:///hello/world/',
},
{
input: ['file://', '/hello/world/'],
output: 'file:///hello/world/',
},
{
input: ['file:', 'hello/world/'],
output: 'file://hello/world/',
},
];
asserts.forEach((testCase) => {
expect(normalizeUrl(testCase.input)).toBe(testCase.output);
@ -115,3 +131,22 @@ describe('normalizeUrl', () => {
);
});
});
describe('getEditUrl', () => {
test('returns right path', () => {
expect(
getEditUrl('foo/bar.md', 'https://github.com/facebook/docusaurus'),
).toEqual('https://github.com/facebook/docusaurus/foo/bar.md');
expect(
getEditUrl('foo/你好.md', 'https://github.com/facebook/docusaurus'),
).toEqual('https://github.com/facebook/docusaurus/foo/你好.md');
});
test('always returns valid URL', () => {
expect(
getEditUrl('foo\\你好.md', 'https://github.com/facebook/docusaurus'),
).toEqual('https://github.com/facebook/docusaurus/foo/你好.md');
});
test('returns undefined for undefined', () => {
expect(getEditUrl('foo/bar.md')).toBeUndefined();
});
});
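The new tests pin down `getEditUrl`'s contract: join the edit URL and the file path, normalize Windows separators, and return `undefined` when no edit URL is configured. A minimal sketch consistent with those assertions (assuming the sibling `normalizeUrl` and `posixPath` helpers from the same package):

```ts
import {normalizeUrl} from './urlUtils';
import {posixPath} from './pathUtils';

export function getEditUrl(
  fileRelativePath: string,
  editUrl?: string,
): string | undefined {
  return editUrl
    ? // Posix-style path so that Windows backslashes still yield a valid URL.
      normalizeUrl([editUrl, posixPath(fileRelativePath)])
    : undefined;
}
```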

View file

@ -44,7 +44,6 @@ export async function getDataFileData<T>(
if (!filePath) {
return undefined;
}
if (await fs.pathExists(filePath)) {
try {
const contentString = await fs.readFile(filePath, {encoding: 'utf8'});
const unsafeContent = Yaml.load(contentString);
@ -55,8 +54,6 @@ export async function getDataFileData<T>(
throw e;
}
}
return undefined;
}
// Order matters: we look in priority in localized folder
export function getContentPathList(contentPaths: ContentPaths): string[] {

View file

@ -35,7 +35,7 @@ export * from './globUtils';
export * from './webpackUtils';
export * from './dataFileUtils';
const fileHash = new Map();
const fileHash = new Map<string, string>();
export async function generate(
generatedFilesDir: string,
file: string,
@ -141,7 +141,10 @@ export function addLeadingSlash(str: string): string {
}
export function addTrailingPathSeparator(str: string): string {
return str.endsWith(path.sep) ? str : `${str}${path.sep}`;
return str.endsWith(path.sep)
? str
: // If this is Windows, we need to change the forward slash to backward
`${str.replace(/\/$/, '')}${path.sep}`;
}
// TODO deduplicate: also present in @docusaurus/utils-common
@ -264,20 +267,6 @@ export function mergeTranslations(
return contents.reduce((acc, content) => ({...acc, ...content}), {});
}
export function getSwizzledComponent(
componentPath: string,
): string | undefined {
const swizzledComponentPath = path.resolve(
process.cwd(),
'src',
componentPath,
);
return fs.existsSync(swizzledComponentPath)
? swizzledComponentPath
: undefined;
}
// Useful to update all the messages of a translation file
// Used in tests to simulate translations
export function updateTranslationFileMessages(

View file

@ -64,7 +64,7 @@ export function replaceMarkdownLinks<T extends ContentPaths>({
// Replace inline-style links or reference-style links e.g:
// This is [Document 1](doc1.md) -> we replace this doc1.md with correct link
// [doc1]: doc1.md -> we replace this doc1.md with correct link
const mdRegex = /(?:(?:\]\()|(?:\]:\s?))(?!https)([^'")\]\s>]+\.mdx?)/g;
const mdRegex = /(?:(?:\]\()|(?:\]:\s?))(?!https?)([^'")\]\s>]+\.mdx?)/g;
let mdMatch = mdRegex.exec(modifiedLine);
while (mdMatch !== null) {
// Replace it to correct html link.
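The one-character change above (`(?!https)` → `(?!https?)`) matters because the old lookahead only skipped `https` links, so a plain `http://...` URL ending in `.md` was still rewritten. A quick check, using a line from the new fixture:

```ts
const oldRegex = /(?:(?:\]\()|(?:\]:\s?))(?!https)([^'")\]\s>]+\.mdx?)/g;
const newRegex = /(?:(?:\]\()|(?:\]:\s?))(?!https?)([^'")\]\s>]+\.mdx?)/g;

const line = '[http](http://github.com/facebook/docusaurus/README.md)';
console.log(oldRegex.test(line)); // true  -> the old regex would try to resolve the external http link
console.log(newRegex.test(line)); // false -> external http links are now left alone
```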

View file

@ -6,7 +6,6 @@
*/
import logger from '@docusaurus/logger';
import fs from 'fs-extra';
import matter from 'gray-matter';
// Input: ## Some heading {#some-heading}
@ -37,6 +36,7 @@ export function createExcerpt(fileString: string): string | undefined {
.replace(/^[^\n]*\n[=]+/g, '')
.split('\n');
let inCode = false;
let lastCodeFence = '';
/* eslint-disable no-continue */
// eslint-disable-next-line no-restricted-syntax
@ -53,7 +53,15 @@ export function createExcerpt(fileString: string): string | undefined {
// Skip code block line.
if (fileLine.trim().startsWith('```')) {
inCode = !inCode;
if (!inCode) {
inCode = true;
[lastCodeFence] = fileLine.trim().match(/^`+/)!;
// If we are in a ````-fenced block, all ``` would be plain text instead of fences
} else if (
fileLine.trim().match(/^`+/)![0].length >= lastCodeFence.length
) {
inCode = false;
}
continue;
} else if (inCode) {
continue;
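A concrete effect of tracking the opening fence length (mirroring the new `parseMarkdownString` fixtures later in this diff): inside a four-backtick block, three-backtick lines are plain text and no longer flip `inCode`. Hypothetical usage, with an illustrative relative import of the exported `createExcerpt`:

```ts
import {createExcerpt} from './markdownParser';

const excerpt = createExcerpt(
  ['````js', 'Foo', '```diff', 'code', '```', 'Bar', '````', 'Content'].join('\n'),
);
console.log(excerpt); // 'Content' — everything inside the ````-fenced block is skipped
```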
@ -100,8 +108,8 @@ export function parseFrontMatter(markdownFileContent: string): {
} {
const {data, content} = matter(markdownFileContent);
return {
frontMatter: data ?? {},
content: content?.trim() ?? '',
frontMatter: data,
content: content.trim(),
};
}
@ -189,17 +197,3 @@ This can happen if you use special characters in frontmatter values (try using d
throw e;
}
}
export async function parseMarkdownFile(
source: string,
options?: {removeContentTitle?: boolean},
): Promise<ParsedMarkdown> {
const markdownString = await fs.readFile(source, 'utf-8');
try {
return parseMarkdownString(markdownString, options);
} catch (e) {
throw new Error(
`Error while parsing Markdown file ${source}: "${(e as Error).message}".`,
);
}
}

View file

@ -15,16 +15,17 @@ const MAX_PATH_SEGMENT_BYTES = 255;
// Space for appending things to the string like file extensions and so on
const SPACE_FOR_APPENDING = 10;
const isMacOs = process.platform === `darwin`;
const isWindows = process.platform === `win32`;
const isMacOs = () => process.platform === 'darwin';
const isWindows = () => process.platform === 'win32';
export const isNameTooLong = (str: string): boolean =>
isMacOs || isWindows
// This is actually not entirely correct: we can't assume FS from OS. But good enough?
isMacOs() || isWindows()
? str.length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_CHARS // MacOS (APFS) and Windows (NTFS) filename length limit (255 chars)
: Buffer.from(str).length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_BYTES; // Other (255 bytes)
export const shortName = (str: string): string => {
if (isMacOs || isWindows) {
if (isMacOs() || isWindows()) {
const overflowingChars = str.length - MAX_PATH_SEGMENT_CHARS;
return str.slice(
0,

View file

@ -47,10 +47,11 @@ export function normalizeFrontMatterTag(
export function normalizeFrontMatterTags(
tagsPath: string,
frontMatterTags: FrontMatterTag[] | undefined,
frontMatterTags: FrontMatterTag[] | undefined = [],
): Tag[] {
const tags =
frontMatterTags?.map((tag) => normalizeFrontMatterTag(tagsPath, tag)) ?? [];
const tags = frontMatterTags.map((tag) =>
normalizeFrontMatterTag(tagsPath, tag),
);
return uniqBy(tags, (tag) => tag.permalink);
}

View file

@ -15,8 +15,13 @@ export function normalizeUrl(rawUrls: string[]): string {
// If the first part is a plain protocol, we combine it with the next part.
if (urls[0].match(/^[^/:]+:\/*$/) && urls.length > 1) {
const first = urls.shift();
if (first!.startsWith('file:') && urls[0].startsWith('/')) {
// Force a double slash here, else we lose the information that the next segment is an absolute path
urls[0] = `${first}//${urls[0]}`;
} else {
urls[0] = first + urls[0];
}
}
// There must be two or three slashes in the file protocol,
// two slashes in anything else.
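The extra branch above exists because concatenating `file:` (or `file://`) with an absolute path used to collapse the slashes, losing the empty-host form. With the change, the combinations asserted in the new tests come out as follows (illustrative calls, assuming `normalizeUrl` as exported from `@docusaurus/utils`):

```ts
import {normalizeUrl} from '@docusaurus/utils';

console.log(normalizeUrl(['file:', '/hello/world/']));   // 'file:///hello/world/'
console.log(normalizeUrl(['file://', '/hello/world/'])); // 'file:///hello/world/'
console.log(normalizeUrl(['file:', 'hello/world/']));    // 'file://hello/world/'
```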
@ -71,7 +76,7 @@ export function normalizeUrl(rawUrls: string[]): string {
str = parts.shift() + (parts.length > 0 ? '?' : '') + parts.join('&');
// Dedupe forward slashes in the entire path, avoiding protocol slashes.
str = str.replace(/([^:]\/)\/+/g, '$1');
str = str.replace(/([^:/]\/)\/+/g, '$1');
// Dedupe forward slashes at the beginning of the path.
str = str.replace(/^\/+/g, '/');

View file

@ -51,6 +51,7 @@ describe('useBaseUrl', () => {
},
}));
expect(useBaseUrl('')).toEqual('');
expect(useBaseUrl('hello')).toEqual('/docusaurus/hello');
expect(useBaseUrl('/hello')).toEqual('/docusaurus/hello');
expect(useBaseUrl('hello/')).toEqual('/docusaurus/hello/');
@ -62,6 +63,7 @@ describe('useBaseUrl', () => {
expect(useBaseUrl('https://github.com')).toEqual('https://github.com');
expect(useBaseUrl('//reactjs.org')).toEqual('//reactjs.org');
expect(useBaseUrl('//reactjs.org', forcePrepend)).toEqual('//reactjs.org');
expect(useBaseUrl('/hello', forcePrepend)).toEqual('/docusaurus/hello');
expect(useBaseUrl('https://site.com', forcePrepend)).toEqual(
'https://site.com',
);

View file

@ -30,7 +30,7 @@ function addBaseUrl(
}
if (forcePrependBaseUrl) {
return baseUrl + url;
return baseUrl + url.replace(/^\//, '');
}
// We should avoid adding the baseurl twice if it's already there
@ -42,8 +42,9 @@ function addBaseUrl(
}
export function useBaseUrlUtils(): BaseUrlUtils {
const {siteConfig: {baseUrl = '/', url: siteUrl} = {}} =
useDocusaurusContext();
const {
siteConfig: {baseUrl, url: siteUrl},
} = useDocusaurusContext();
return {
withBaseUrl: (url, options) => addBaseUrl(siteUrl, baseUrl, url, options),
};
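The `url.replace(/^\//, '')` above prevents a doubled slash when an already-absolute URL is force-prepended with the base URL, matching the new test expectation (baseUrl value taken from the test fixture):

```ts
const baseUrl = '/docusaurus/';
console.log(baseUrl + '/hello');                    // '/docusaurus//hello' (old behavior)
console.log(baseUrl + '/hello'.replace(/^\//, '')); // '/docusaurus/hello'  (new behavior)
```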

View file

@ -34,10 +34,6 @@ declare module 'react-loadable-ssr-addon-v5-slorber' {
export default plugin;
}
declare module 'resolve-pathname' {
export default function resolvePathname(to: string, from?: string): string;
}
declare module '@slorber/static-site-generator-webpack-plugin' {
export type Locals = {
routesLocation: Record<string, string>;

View file

@ -23,6 +23,7 @@ describe('brokenLinks', () => {
'/otherSourcePage': [{link: '/badLink', resolvedLink: '/badLink'}],
});
expect(message).toMatchSnapshot();
expect(getBrokenLinksErrorMessage({})).toBeUndefined();
});
test('getBrokenLinksErrorMessage with potential layout broken links', async () => {
@ -205,6 +206,7 @@ describe('brokenLinks', () => {
});
expect(result).toEqual(allCollectedLinksFiltered);
});
});
describe('Encoded link', () => {
test('getAllBrokenLinks', async () => {
@ -255,4 +257,3 @@ describe('brokenLinks', () => {
);
});
});
});

View file

@ -9,11 +9,15 @@ import {
matchRoutes,
type RouteConfig as RRRouteConfig,
} from 'react-router-config';
import resolvePathname from 'resolve-pathname';
import fs from 'fs-extra';
import {mapValues, pickBy, countBy} from 'lodash';
import type {RouteConfig, ReportingSeverity} from '@docusaurus/types';
import {removePrefix, removeSuffix, reportMessage} from '@docusaurus/utils';
import {
removePrefix,
removeSuffix,
reportMessage,
resolvePathname,
} from '@docusaurus/utils';
import {getAllFinalRoutes} from './utils';
import path from 'path';

View file

@ -12,19 +12,13 @@ import {getLangDir} from 'rtl-detect';
import logger from '@docusaurus/logger';
function getDefaultLocaleLabel(locale: string) {
// Intl.DisplayNames is ES2021 - Node14+
// https://v8.dev/features/intl-displaynames
if (typeof Intl.DisplayNames !== 'undefined') {
const languageName = new Intl.DisplayNames(locale, {type: 'language'}).of(
locale,
);
return (
languageName.charAt(0).toLocaleUpperCase(locale) +
languageName.substring(1)
languageName.charAt(0).toLocaleUpperCase(locale) + languageName.substring(1)
);
}
return locale;
}
export function getDefaultLocaleConfig(locale: string): I18nLocaleConfig {
return {

View file

@ -6,15 +6,15 @@
*/
import {getPluginVersion} from '..';
import {join} from 'path';
import path from 'path';
describe('getPluginVersion', () => {
it('Can detect external packages plugins versions of correctly.', () => {
expect(
getPluginVersion(
join(__dirname, '..', '__fixtures__', 'dummy-plugin.js'),
path.join(__dirname, '..', '__fixtures__', 'dummy-plugin.js'),
// Make the plugin appear external.
join(__dirname, '..', '..', '..', '..', '..', '..', 'website'),
path.join(__dirname, '..', '..', '..', '..', '..', '..', 'website'),
),
).toEqual({type: 'package', version: 'random-version'});
});
@ -22,9 +22,9 @@ describe('getPluginVersion', () => {
it('Can detect project plugins versions correctly.', () => {
expect(
getPluginVersion(
join(__dirname, '..', '__fixtures__', 'dummy-plugin.js'),
path.join(__dirname, '..', '__fixtures__', 'dummy-plugin.js'),
// Make the plugin appear project local.
join(__dirname, '..', '__fixtures__'),
path.join(__dirname, '..', '__fixtures__'),
),
).toEqual({type: 'project'});
});