Mirror of https://github.com/facebook/docusaurus.git (synced 2025-07-17 08:37:57 +02:00)

test: enable a few jest eslint rules (#6900)

* test: enable a few jest eslint rules
* more

This commit is contained in:
parent 1efc6c6091
commit aa5a2d4c04

155 changed files with 3644 additions and 3478 deletions
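The lint configuration change itself is not part of this excerpt; as a rough, hypothetical sketch of what "enabling a few jest eslint rules" can look like, the fragment below uses rule names and options inferred from the visible changes (test() calls renamed to it(), "should ..." wording dropped from titles, large inline snapshots moved into .snap files). It is illustrative only, not the configuration actually modified by #6900.

// Hypothetical .eslintrc.js fragment -- an assumption for illustration, not taken from this commit
module.exports = {
  plugins: ['jest'],
  rules: {
    // Enforce it() rather than test() inside describe() blocks
    'jest/consistent-test-it': ['error', {fn: 'test', withinDescribe: 'it'}],
    // Reject "should ..." phrasing in test titles
    'jest/valid-title': ['error', {disallowedWords: ['should']}],
    // Flag oversized inline snapshots so they move to external .snap files
    'jest/no-large-snapshots': ['warn', {maxSize: 12, inlineMaxSize: 6}],
  },
};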
@@ -0,0 +1,136 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`replaceMarkdownLinks does basic replace 1`] = `
Object {
  "brokenMarkdownLinks": Array [
    Object {
      "contentPaths": Object {
        "contentPath": "docs",
        "contentPathLocalized": "i18n/docs-localized",
      },
      "filePath": "docs/intro.md",
      "link": "hmmm.md",
    },
  ],
  "newContent": "
[foo](/doc/foo)
[baz](/doc/baz)
[foo](/doc/foo)
[http](http://github.com/facebook/docusaurus/README.md)
[https](https://github.com/facebook/docusaurus/README.md)
[asset](./foo.js)
[asset as well](@site/docs/_partial.md)
[looks like http...](/doc/http)
[nonexistent](hmmm.md)
",
}
`;

exports[`replaceMarkdownLinks ignores links in HTML comments 1`] = `
Object {
  "brokenMarkdownLinks": Array [
    Object {
      "contentPaths": Object {
        "contentPath": "docs",
        "contentPathLocalized": "i18n/docs-localized",
      },
      "filePath": "docs/intro.md",
      "link": "./foo.md",
    },
    Object {
      "contentPaths": Object {
        "contentPath": "docs",
        "contentPathLocalized": "i18n/docs-localized",
      },
      "filePath": "docs/intro.md",
      "link": "./foo.md",
    },
  ],
  "newContent": "
<!-- [foo](./foo.md) -->
<!--
[foo](./foo.md)
-->
",
}
`;

exports[`replaceMarkdownLinks ignores links in fenced blocks 1`] = `
Object {
  "brokenMarkdownLinks": Array [],
  "newContent": "
\`\`\`
[foo](foo.md)
\`\`\`

\`\`\`\`js
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`\`

\`\`\`\`js
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`\`
",
}
`;

exports[`replaceMarkdownLinks ignores links in inline code 1`] = `
Object {
  "brokenMarkdownLinks": Array [
    Object {
      "contentPaths": Object {
        "contentPath": "docs",
        "contentPathLocalized": "i18n/docs-localized",
      },
      "filePath": "docs/intro.md",
      "link": "foo.md",
    },
  ],
  "newContent": "
\`[foo](foo.md)\`
",
}
`;

exports[`replaceMarkdownLinks replaces links with same title as URL 1`] = `
Object {
  "brokenMarkdownLinks": Array [],
  "newContent": "
[/docs/foo](foo.md)
[/docs/foo](./foo.md)
[foo.md](/docs/foo)
[.//docs/foo](foo.md)
",
}
`;

exports[`replaceMarkdownLinks replaces multiple links on same line 1`] = `
Object {
  "brokenMarkdownLinks": Array [],
  "newContent": "
[a](/docs/a), [a](/docs/a), [b](/docs/b), [c](/docs/c)
",
}
`;

exports[`replaceMarkdownLinks replaces reference style Markdown links 1`] = `
Object {
  "brokenMarkdownLinks": Array [],
  "newContent": "
The following operations are defined for [URI]s:

* [info]: Returns metadata about the resource,
* [list]: Returns metadata about the resource's children (like getting the content of a local directory).

[URI]: /docs/api/classes/uri
[info]: /docs/api/classes/uri#info
[list]: /docs/api/classes/uri#list
",
}
`;
@@ -0,0 +1,200 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`parseMarkdownString deletes only first heading 1`] = `
Object {
  "content": "# Markdown Title

test test test # test bar

# Markdown Title 2

### Markdown Title h3",
  "contentTitle": "Markdown Title",
  "excerpt": "test test test # test bar",
  "frontMatter": Object {},
}
`;

exports[`parseMarkdownString deletes only first heading 2 1`] = `
Object {
  "content": "# test

test test test test test test
test test test # test bar
# test2
### test
test3",
  "contentTitle": "test",
  "excerpt": "test test test test test test",
  "frontMatter": Object {},
}
`;

exports[`parseMarkdownString does not warn for duplicate title if markdown title is not at the top 1`] = `
Object {
  "content": "foo

# Markdown Title",
  "contentTitle": undefined,
  "excerpt": "foo",
  "frontMatter": Object {
    "title": "Frontmatter title",
  },
}
`;

exports[`parseMarkdownString handles code blocks 1`] = `
Object {
  "content": "\`\`\`js
code
\`\`\`

Content",
  "contentTitle": undefined,
  "excerpt": "Content",
  "frontMatter": Object {},
}
`;

exports[`parseMarkdownString handles code blocks 2`] = `
Object {
  "content": "\`\`\`\`js
Foo
\`\`\`diff
code
\`\`\`
Bar
\`\`\`\`

Content",
  "contentTitle": undefined,
  "excerpt": "Content",
  "frontMatter": Object {},
}
`;

exports[`parseMarkdownString handles code blocks 3`] = `
Object {
  "content": "\`\`\`\`js
Foo
\`\`\`diff
code
\`\`\`\`

Content",
  "contentTitle": undefined,
  "excerpt": "Content",
  "frontMatter": Object {},
}
`;

exports[`parseMarkdownString ignores markdown title if its not a first text 1`] = `
Object {
  "content": "foo
# test",
  "contentTitle": undefined,
  "excerpt": "foo",
  "frontMatter": Object {},
}
`;

exports[`parseMarkdownString parse markdown with front matter 1`] = `
Object {
  "content": "Some text",
  "contentTitle": undefined,
  "excerpt": "Some text",
  "frontMatter": Object {
    "title": "Frontmatter title",
  },
}
`;

exports[`parseMarkdownString parses first heading as contentTitle 1`] = `
Object {
  "content": "# Markdown Title

Some text",
  "contentTitle": "Markdown Title",
  "excerpt": "Some text",
  "frontMatter": Object {},
}
`;

exports[`parseMarkdownString parses front-matter and ignore h2 1`] = `
Object {
  "content": "## test",
  "contentTitle": undefined,
  "excerpt": "test",
  "frontMatter": Object {
    "title": "Frontmatter title",
  },
}
`;

exports[`parseMarkdownString parses title only 1`] = `
Object {
  "content": "# test",
  "contentTitle": "test",
  "excerpt": undefined,
  "frontMatter": Object {},
}
`;

exports[`parseMarkdownString parses title only alternate 1`] = `
Object {
  "content": "test
===",
  "contentTitle": "test",
  "excerpt": undefined,
  "frontMatter": Object {},
}
`;

exports[`parseMarkdownString reads front matter only 1`] = `
Object {
  "content": "",
  "contentTitle": undefined,
  "excerpt": undefined,
  "frontMatter": Object {
    "title": "test",
  },
}
`;

exports[`parseMarkdownString warns about duplicate titles (front matter + markdown alternate) 1`] = `
Object {
  "content": "Markdown Title alternate
================

Some text",
  "contentTitle": "Markdown Title alternate",
  "excerpt": "Some text",
  "frontMatter": Object {
    "title": "Frontmatter title",
  },
}
`;

exports[`parseMarkdownString warns about duplicate titles (front matter + markdown) 1`] = `
Object {
  "content": "# Markdown Title

Some text",
  "contentTitle": "Markdown Title",
  "excerpt": "Some text",
  "frontMatter": Object {
    "title": "Frontmatter title",
  },
}
`;

exports[`parseMarkdownString warns about duplicate titles 1`] = `
Object {
  "content": "# test",
  "contentTitle": "test",
  "excerpt": undefined,
  "frontMatter": Object {
    "title": "Frontmatter title",
  },
}
`;
@@ -22,7 +22,7 @@ describe('getDataFilePath', () => {
  const contentPathEmpty = path.join(fixturesDir, 'contentPathEmpty');
  const contentPathNestedYml = path.join(fixturesDir, 'contentPathNestedYml');

  test('getDataFilePath returns localized Yml path in priority', async () => {
  it('getDataFilePath returns localized Yml path in priority', async () => {
    await expect(
      getDataFilePath({
        filePath: 'authors.yml',
@@ -43,7 +43,7 @@ describe('getDataFilePath', () => {
    ).resolves.toEqual(path.join(contentPathYml2, 'authors.yml'));
  });

  test('getDataFilePath returns localized Json path in priority', async () => {
  it('getDataFilePath returns localized Json path in priority', async () => {
    await expect(
      getDataFilePath({
        filePath: 'authors.json',
@@ -64,7 +64,7 @@ describe('getDataFilePath', () => {
    ).resolves.toEqual(path.join(contentPathJson2, 'authors.json'));
  });

  test('getDataFilePath returns unlocalized Yml path as fallback', async () => {
  it('getDataFilePath returns unlocalized Yml path as fallback', async () => {
    await expect(
      getDataFilePath({
        filePath: 'authors.yml',
@@ -76,7 +76,7 @@ describe('getDataFilePath', () => {
    ).resolves.toEqual(path.join(contentPathYml2, 'authors.yml'));
  });

  test('getDataFilePath returns unlocalized Json path as fallback', async () => {
  it('getDataFilePath returns unlocalized Json path as fallback', async () => {
    await expect(
      getDataFilePath({
        filePath: 'authors.json',
@@ -88,7 +88,7 @@ describe('getDataFilePath', () => {
    ).resolves.toEqual(path.join(contentPathJson1, 'authors.json'));
  });

  test('getDataFilePath can return undefined (file not found)', async () => {
  it('getDataFilePath can return undefined (file not found)', async () => {
    await expect(
      getDataFilePath({
        filePath: 'authors.json',
@@ -109,7 +109,7 @@ describe('getDataFilePath', () => {
    ).resolves.toBeUndefined();
  });

  test('getDataFilePath can return nested path', async () => {
  it('getDataFilePath can return nested path', async () => {
    await expect(
      getDataFilePath({
        filePath: 'sub/folder/authors.yml',
@@ -143,25 +143,25 @@ describe('getDataFileData', () => {
    );
  }

  test('returns undefined for nonexistent file', async () => {
  it('returns undefined for nonexistent file', async () => {
    await expect(readDataFile('nonexistent.yml')).resolves.toBeUndefined();
  });

  test('read valid yml author file', async () => {
  it('read valid yml author file', async () => {
    await expect(readDataFile('valid.yml')).resolves.toEqual({a: 1});
  });

  test('read valid json author file', async () => {
  it('read valid json author file', async () => {
    await expect(readDataFile('valid.json')).resolves.toEqual({a: 1});
  });

  test('fail to read invalid yml', async () => {
  it('fail to read invalid yml', async () => {
    await expect(
      readDataFile('bad.yml'),
    ).rejects.toThrowErrorMatchingInlineSnapshot(`"Nope"`);
  });

  test('fail to read invalid json', async () => {
  it('fail to read invalid json', async () => {
    await expect(
      readDataFile('bad.json'),
    ).rejects.toThrowErrorMatchingInlineSnapshot(`"Nope"`);
@@ -169,7 +169,7 @@ describe('getDataFileData', () => {
});

describe('findFolderContainingFile', () => {
  test('find appropriate folder', async () => {
  it('find appropriate folder', async () => {
    await expect(
      findFolderContainingFile(
        ['/abcdef', '/gehij', __dirname, '/klmn'],
@@ -178,7 +178,7 @@ describe('findFolderContainingFile', () => {
    ).resolves.toEqual(__dirname);
  });

  test('return undefined if no folder contain such file', async () => {
  it('return undefined if no folder contain such file', async () => {
    await expect(
      findFolderContainingFile(['/abcdef', '/gehij', '/klmn'], 'index.test.ts'),
    ).resolves.toBeUndefined();
@@ -186,7 +186,7 @@ describe('findFolderContainingFile', () => {
});

describe('getFolderContainingFile', () => {
  test('get appropriate folder', async () => {
  it('get appropriate folder', async () => {
    await expect(
      getFolderContainingFile(
        ['/abcdef', '/gehij', __dirname, '/klmn'],
@@ -195,7 +195,7 @@ describe('getFolderContainingFile', () => {
    ).resolves.toEqual(__dirname);
  });

  test('throw if no folder contain such file', async () => {
  it('throw if no folder contain such file', async () => {
    await expect(
      getFolderContainingFile(
        ['/abcdef', '/gehij', '/klmn'],
@ -10,53 +10,57 @@ import {genChunkName, readOutputHTMLFile, generate} from '../emitUtils';
|
|||
import path from 'path';
|
||||
import fs from 'fs-extra';
|
||||
|
||||
test('genChunkName', () => {
|
||||
const firstAssert: Record<string, string> = {
|
||||
'/docs/adding-blog': 'docs-adding-blog-062',
|
||||
'/docs/versioning': 'docs-versioning-8a8',
|
||||
'/': 'index',
|
||||
'/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus':
|
||||
'blog-2018-04-30-how-i-converted-profilo-to-docusaurus-4f2',
|
||||
'/youtube': 'youtube-429',
|
||||
'/users/en/': 'users-en-f7a',
|
||||
'/blog': 'blog-c06',
|
||||
};
|
||||
Object.keys(firstAssert).forEach((str) => {
|
||||
expect(genChunkName(str)).toBe(firstAssert[str]);
|
||||
describe('genChunkName', () => {
|
||||
it('works', () => {
|
||||
const firstAssert: Record<string, string> = {
|
||||
'/docs/adding-blog': 'docs-adding-blog-062',
|
||||
'/docs/versioning': 'docs-versioning-8a8',
|
||||
'/': 'index',
|
||||
'/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus':
|
||||
'blog-2018-04-30-how-i-converted-profilo-to-docusaurus-4f2',
|
||||
'/youtube': 'youtube-429',
|
||||
'/users/en/': 'users-en-f7a',
|
||||
'/blog': 'blog-c06',
|
||||
};
|
||||
Object.keys(firstAssert).forEach((str) => {
|
||||
expect(genChunkName(str)).toBe(firstAssert[str]);
|
||||
});
|
||||
});
|
||||
|
||||
// Don't allow different chunk name for same path.
|
||||
expect(genChunkName('path/is/similar', 'oldPrefix')).toEqual(
|
||||
genChunkName('path/is/similar', 'newPrefix'),
|
||||
);
|
||||
|
||||
// Even with same preferred name, still different chunk name for
|
||||
// different path
|
||||
const secondAssert: Record<string, string> = {
|
||||
'/blog/1': 'blog-85-f-089',
|
||||
'/blog/2': 'blog-353-489',
|
||||
};
|
||||
Object.keys(secondAssert).forEach((str) => {
|
||||
expect(genChunkName(str, undefined, 'blog')).toBe(secondAssert[str]);
|
||||
});
|
||||
|
||||
// Only generate short unique id
|
||||
const thirdAssert: Record<string, string> = {
|
||||
a: '0cc175b9',
|
||||
b: '92eb5ffe',
|
||||
c: '4a8a08f0',
|
||||
d: '8277e091',
|
||||
};
|
||||
Object.keys(thirdAssert).forEach((str) => {
|
||||
expect(genChunkName(str, undefined, undefined, true)).toBe(
|
||||
thirdAssert[str],
|
||||
it("doesn't allow different chunk name for same path", () => {
|
||||
expect(genChunkName('path/is/similar', 'oldPrefix')).toEqual(
|
||||
genChunkName('path/is/similar', 'newPrefix'),
|
||||
);
|
||||
});
|
||||
expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091');
|
||||
|
||||
it('emits different chunk names for different paths even with same preferred name', () => {
|
||||
const secondAssert: Record<string, string> = {
|
||||
'/blog/1': 'blog-85-f-089',
|
||||
'/blog/2': 'blog-353-489',
|
||||
};
|
||||
Object.keys(secondAssert).forEach((str) => {
|
||||
expect(genChunkName(str, undefined, 'blog')).toBe(secondAssert[str]);
|
||||
});
|
||||
});
|
||||
|
||||
it('only generates short unique IDs', () => {
|
||||
const thirdAssert: Record<string, string> = {
|
||||
a: '0cc175b9',
|
||||
b: '92eb5ffe',
|
||||
c: '4a8a08f0',
|
||||
d: '8277e091',
|
||||
};
|
||||
Object.keys(thirdAssert).forEach((str) => {
|
||||
expect(genChunkName(str, undefined, undefined, true)).toBe(
|
||||
thirdAssert[str],
|
||||
);
|
||||
});
|
||||
expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091');
|
||||
});
|
||||
});
|
||||
|
||||
describe('readOutputHTMLFile', () => {
|
||||
test('trailing slash undefined', async () => {
|
||||
it('trailing slash undefined', async () => {
|
||||
await expect(
|
||||
readOutputHTMLFile(
|
||||
'/file',
|
||||
|
@ -86,7 +90,7 @@ describe('readOutputHTMLFile', () => {
|
|||
).then(String),
|
||||
).resolves.toEqual('folder\n');
|
||||
});
|
||||
test('trailing slash true', async () => {
|
||||
it('trailing slash true', async () => {
|
||||
await expect(
|
||||
readOutputHTMLFile(
|
||||
'/folder',
|
||||
|
@ -102,7 +106,7 @@ describe('readOutputHTMLFile', () => {
|
|||
).then(String),
|
||||
).resolves.toEqual('folder\n');
|
||||
});
|
||||
test('trailing slash false', async () => {
|
||||
it('trailing slash false', async () => {
|
||||
await expect(
|
||||
readOutputHTMLFile(
|
||||
'/file',
|
||||
|
@ -120,36 +124,40 @@ describe('readOutputHTMLFile', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('generate', async () => {
|
||||
describe('generate', () => {
|
||||
const writeMock = jest.spyOn(fs, 'outputFile').mockImplementation(() => {});
|
||||
const existsMock = jest.spyOn(fs, 'pathExists');
|
||||
const readMock = jest.spyOn(fs, 'readFile');
|
||||
|
||||
// First call: no file, no cache
|
||||
existsMock.mockImplementationOnce(() => false);
|
||||
await generate(__dirname, 'foo', 'bar');
|
||||
expect(writeMock).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
path.join(__dirname, 'foo'),
|
||||
'bar',
|
||||
);
|
||||
it('works with no file and no cache', async () => {
|
||||
existsMock.mockImplementationOnce(() => false);
|
||||
await generate(__dirname, 'foo', 'bar');
|
||||
expect(writeMock).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
path.join(__dirname, 'foo'),
|
||||
'bar',
|
||||
);
|
||||
});
|
||||
|
||||
// Second call: cache exists
|
||||
await generate(__dirname, 'foo', 'bar');
|
||||
expect(writeMock).toBeCalledTimes(1);
|
||||
it('works with existing cache', async () => {
|
||||
await generate(__dirname, 'foo', 'bar');
|
||||
expect(writeMock).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
// Generate another: file exists, cache doesn't
|
||||
existsMock.mockImplementationOnce(() => true);
|
||||
// @ts-expect-error: seems the typedef doesn't understand overload
|
||||
readMock.mockImplementationOnce(() => Promise.resolve('bar'));
|
||||
await generate(__dirname, 'baz', 'bar');
|
||||
expect(writeMock).toBeCalledTimes(1);
|
||||
it('works with existing file but no cache', async () => {
|
||||
existsMock.mockImplementationOnce(() => true);
|
||||
// @ts-expect-error: seems the typedef doesn't understand overload
|
||||
readMock.mockImplementationOnce(() => Promise.resolve('bar'));
|
||||
await generate(__dirname, 'baz', 'bar');
|
||||
expect(writeMock).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
// Generate again: force skip cache
|
||||
await generate(__dirname, 'foo', 'bar', true);
|
||||
expect(writeMock).toHaveBeenNthCalledWith(
|
||||
2,
|
||||
path.join(__dirname, 'foo'),
|
||||
'bar',
|
||||
);
|
||||
it('works when force skipping cache', async () => {
|
||||
await generate(__dirname, 'foo', 'bar', true);
|
||||
expect(writeMock).toHaveBeenNthCalledWith(
|
||||
2,
|
||||
path.join(__dirname, 'foo'),
|
||||
'bar',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -14,7 +14,7 @@ import {
|
|||
describe('createMatcher', () => {
|
||||
const matcher = createMatcher(GlobExcludeDefault);
|
||||
|
||||
test('match default exclude MD/MDX partials correctly', () => {
|
||||
it('match default exclude MD/MDX partials correctly', () => {
|
||||
expect(matcher('doc.md')).toEqual(false);
|
||||
expect(matcher('category/doc.md')).toEqual(false);
|
||||
expect(matcher('category/subcategory/doc.md')).toEqual(false);
|
||||
|
@ -31,7 +31,7 @@ describe('createMatcher', () => {
|
|||
expect(matcher('category/_subcategory/doc.md')).toEqual(true);
|
||||
});
|
||||
|
||||
test('match default exclude tests correctly', () => {
|
||||
it('match default exclude tests correctly', () => {
|
||||
expect(matcher('xyz.js')).toEqual(false);
|
||||
expect(matcher('xyz.ts')).toEqual(false);
|
||||
expect(matcher('xyz.jsx')).toEqual(false);
|
||||
|
@ -73,7 +73,7 @@ describe('createAbsoluteFilePathMatcher', () => {
|
|||
rootFolders,
|
||||
);
|
||||
|
||||
test('match default exclude MD/MDX partials correctly', () => {
|
||||
it('match default exclude MD/MDX partials correctly', () => {
|
||||
expect(matcher('/_root/docs/myDoc.md')).toEqual(false);
|
||||
expect(matcher('/_root/docs/myDoc.mdx')).toEqual(false);
|
||||
expect(matcher('/root/_docs/myDoc.md')).toEqual(false);
|
||||
|
@ -93,13 +93,13 @@ describe('createAbsoluteFilePathMatcher', () => {
|
|||
expect(matcher('/root/_docs/_category/myDoc.mdx')).toEqual(true);
|
||||
});
|
||||
|
||||
test('match default exclude tests correctly', () => {
|
||||
it('match default exclude tests correctly', () => {
|
||||
expect(matcher('/__test__/website/src/xyz.js')).toEqual(false);
|
||||
expect(matcher('/__test__/website/src/__test__/xyz.js')).toEqual(true);
|
||||
expect(matcher('/__test__/website/src/xyz.test.js')).toEqual(true);
|
||||
});
|
||||
|
||||
test('throw if file is not contained in any root doc', () => {
|
||||
it('throw if file is not contained in any root doc', () => {
|
||||
expect(() =>
|
||||
matcher('/bad/path/myDoc.md'),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
import {simpleHash, docuHash} from '../hashUtils';
|
||||
|
||||
describe('hashUtils', () => {
|
||||
test('simpleHash', () => {
|
||||
it('simpleHash', () => {
|
||||
const asserts: Record<string, string> = {
|
||||
'': 'd41',
|
||||
'/foo-bar': '096',
|
||||
|
@ -29,7 +29,7 @@ describe('hashUtils', () => {
|
|||
});
|
||||
|
||||
describe('docuHash', () => {
|
||||
test('docuHash works', () => {
|
||||
it('docuHash works', () => {
|
||||
const asserts: Record<string, string> = {
|
||||
'': '-d41',
|
||||
'/': 'index',
|
||||
|
|
|
@ -11,52 +11,56 @@ import {
|
|||
getPluginI18nPath,
|
||||
} from '../i18nUtils';
|
||||
|
||||
test('mergeTranslations', () => {
|
||||
expect(
|
||||
mergeTranslations([
|
||||
{
|
||||
T1: {message: 'T1 message', description: 'T1 desc'},
|
||||
T2: {message: 'T2 message', description: 'T2 desc'},
|
||||
T3: {message: 'T3 message', description: 'T3 desc'},
|
||||
},
|
||||
{
|
||||
T4: {message: 'T4 message', description: 'T4 desc'},
|
||||
},
|
||||
{T2: {message: 'T2 message 2', description: 'T2 desc 2'}},
|
||||
]),
|
||||
).toEqual({
|
||||
T1: {message: 'T1 message', description: 'T1 desc'},
|
||||
T2: {message: 'T2 message 2', description: 'T2 desc 2'},
|
||||
T3: {message: 'T3 message', description: 'T3 desc'},
|
||||
T4: {message: 'T4 message', description: 'T4 desc'},
|
||||
describe('mergeTranslations', () => {
|
||||
it('works', () => {
|
||||
expect(
|
||||
mergeTranslations([
|
||||
{
|
||||
T1: {message: 'T1 message', description: 'T1 desc'},
|
||||
T2: {message: 'T2 message', description: 'T2 desc'},
|
||||
T3: {message: 'T3 message', description: 'T3 desc'},
|
||||
},
|
||||
{
|
||||
T4: {message: 'T4 message', description: 'T4 desc'},
|
||||
},
|
||||
{T2: {message: 'T2 message 2', description: 'T2 desc 2'}},
|
||||
]),
|
||||
).toEqual({
|
||||
T1: {message: 'T1 message', description: 'T1 desc'},
|
||||
T2: {message: 'T2 message 2', description: 'T2 desc 2'},
|
||||
T3: {message: 'T3 message', description: 'T3 desc'},
|
||||
T4: {message: 'T4 message', description: 'T4 desc'},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('updateTranslationFileMessages', () => {
|
||||
expect(
|
||||
updateTranslationFileMessages(
|
||||
{
|
||||
path: 'abc',
|
||||
content: {
|
||||
t1: {message: 't1 message', description: 't1 desc'},
|
||||
t2: {message: 't2 message', description: 't2 desc'},
|
||||
t3: {message: 't3 message', description: 't3 desc'},
|
||||
describe('updateTranslationFileMessages', () => {
|
||||
it('works', () => {
|
||||
expect(
|
||||
updateTranslationFileMessages(
|
||||
{
|
||||
path: 'abc',
|
||||
content: {
|
||||
t1: {message: 't1 message', description: 't1 desc'},
|
||||
t2: {message: 't2 message', description: 't2 desc'},
|
||||
t3: {message: 't3 message', description: 't3 desc'},
|
||||
},
|
||||
},
|
||||
(message) => `prefix ${message} suffix`,
|
||||
),
|
||||
).toEqual({
|
||||
path: 'abc',
|
||||
content: {
|
||||
t1: {message: 'prefix t1 message suffix', description: 't1 desc'},
|
||||
t2: {message: 'prefix t2 message suffix', description: 't2 desc'},
|
||||
t3: {message: 'prefix t3 message suffix', description: 't3 desc'},
|
||||
},
|
||||
(message) => `prefix ${message} suffix`,
|
||||
),
|
||||
).toEqual({
|
||||
path: 'abc',
|
||||
content: {
|
||||
t1: {message: 'prefix t1 message suffix', description: 't1 desc'},
|
||||
t2: {message: 'prefix t2 message suffix', description: 't2 desc'},
|
||||
t3: {message: 'prefix t3 message suffix', description: 't3 desc'},
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getPluginI18nPath', () => {
|
||||
test('gets correct path', () => {
|
||||
it('gets correct path', () => {
|
||||
expect(
|
||||
getPluginI18nPath({
|
||||
siteDir: __dirname,
|
||||
|
@ -69,7 +73,7 @@ describe('getPluginI18nPath', () => {
|
|||
`"<PROJECT_ROOT>/packages/docusaurus-utils/src/__tests__/i18n/zh-Hans/plugin-content-docs-community/foo"`,
|
||||
);
|
||||
});
|
||||
test('gets correct path for default plugin', () => {
|
||||
it('gets correct path for default plugin', () => {
|
||||
expect(
|
||||
getPluginI18nPath({
|
||||
siteDir: __dirname,
|
||||
|
@ -79,7 +83,7 @@ describe('getPluginI18nPath', () => {
|
|||
}).replace(__dirname, ''),
|
||||
).toMatchInlineSnapshot(`"/i18n/zh-Hans/plugin-content-docs/foo"`);
|
||||
});
|
||||
test('gets correct path when no subpaths', () => {
|
||||
it('gets correct path when no subpaths', () => {
|
||||
expect(
|
||||
getPluginI18nPath({
|
||||
siteDir: __dirname,
|
||||
|
|
|
@ -17,37 +17,29 @@ import {
|
|||
import _ from 'lodash';
|
||||
|
||||
describe('removeSuffix', () => {
|
||||
test('should no-op 1', () => {
|
||||
it("is no-op when suffix doesn't exist", () => {
|
||||
expect(removeSuffix('abcdef', 'ijk')).toEqual('abcdef');
|
||||
});
|
||||
test('should no-op 2', () => {
|
||||
expect(removeSuffix('abcdef', 'abc')).toEqual('abcdef');
|
||||
});
|
||||
test('should no-op 3', () => {
|
||||
expect(removeSuffix('abcdef', '')).toEqual('abcdef');
|
||||
});
|
||||
test('should remove suffix', () => {
|
||||
it('removes suffix', () => {
|
||||
expect(removeSuffix('abcdef', 'ef')).toEqual('abcd');
|
||||
});
|
||||
});
|
||||
|
||||
describe('removePrefix', () => {
|
||||
test('should no-op 1', () => {
|
||||
it("is no-op when prefix doesn't exist", () => {
|
||||
expect(removePrefix('abcdef', 'ijk')).toEqual('abcdef');
|
||||
});
|
||||
test('should no-op 2', () => {
|
||||
expect(removePrefix('abcdef', 'def')).toEqual('abcdef');
|
||||
});
|
||||
test('should no-op 3', () => {
|
||||
expect(removePrefix('abcdef', '')).toEqual('abcdef');
|
||||
});
|
||||
test('should remove prefix', () => {
|
||||
it('removes prefix', () => {
|
||||
expect(removePrefix('abcdef', 'ab')).toEqual('cdef');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getElementsAround', () => {
|
||||
test('can return elements around', () => {
|
||||
it('returns elements around', () => {
|
||||
expect(getElementsAround(['a', 'b', 'c', 'd'], 0)).toEqual({
|
||||
previous: undefined,
|
||||
next: 'b',
|
||||
|
@ -66,7 +58,7 @@ describe('getElementsAround', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('throws if bad index is provided', () => {
|
||||
it('throws if bad index is provided', () => {
|
||||
expect(() =>
|
||||
getElementsAround(['a', 'b', 'c', 'd'], -1),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
|
@ -87,7 +79,7 @@ describe('mapAsyncSequential', () => {
|
|||
});
|
||||
}
|
||||
|
||||
test('map sequentially', async () => {
|
||||
it('maps sequentially', async () => {
|
||||
const itemToTimeout: Record<string, number> = {
|
||||
'1': 200,
|
||||
'2': 600,
|
||||
|
@ -132,7 +124,7 @@ describe('findAsyncSequential', () => {
|
|||
});
|
||||
}
|
||||
|
||||
test('find sequentially', async () => {
|
||||
it('finds sequentially', async () => {
|
||||
const items = ['1', '2', '3'];
|
||||
|
||||
const findFn = jest.fn(async (item: string) => {
|
||||
|
@ -155,7 +147,7 @@ describe('findAsyncSequential', () => {
|
|||
});
|
||||
|
||||
describe('reportMessage', () => {
|
||||
test('all severities', () => {
|
||||
it('works with all severities', () => {
|
||||
const consoleLog = jest.spyOn(console, 'info').mockImplementation(() => {});
|
||||
const consoleWarn = jest
|
||||
.spyOn(console, 'warn')
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
import {replaceMarkdownLinks} from '../markdownLinks';
|
||||
|
||||
describe('replaceMarkdownLinks', () => {
|
||||
test('basic replace', () => {
|
||||
it('does basic replace', () => {
|
||||
expect(
|
||||
replaceMarkdownLinks({
|
||||
siteDir: '.',
|
||||
|
@ -35,34 +35,10 @@ describe('replaceMarkdownLinks', () => {
|
|||
[nonexistent](hmmm.md)
|
||||
`,
|
||||
}),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"brokenMarkdownLinks": Array [
|
||||
Object {
|
||||
"contentPaths": Object {
|
||||
"contentPath": "docs",
|
||||
"contentPathLocalized": "i18n/docs-localized",
|
||||
},
|
||||
"filePath": "docs/intro.md",
|
||||
"link": "hmmm.md",
|
||||
},
|
||||
],
|
||||
"newContent": "
|
||||
[foo](/doc/foo)
|
||||
[baz](/doc/baz)
|
||||
[foo](/doc/foo)
|
||||
[http](http://github.com/facebook/docusaurus/README.md)
|
||||
[https](https://github.com/facebook/docusaurus/README.md)
|
||||
[asset](./foo.js)
|
||||
[asset as well](@site/docs/_partial.md)
|
||||
[looks like http...](/doc/http)
|
||||
[nonexistent](hmmm.md)
|
||||
",
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('reference style Markdown links', () => {
|
||||
it('replaces reference style Markdown links', () => {
|
||||
expect(
|
||||
replaceMarkdownLinks({
|
||||
siteDir: '.',
|
||||
|
@ -88,25 +64,11 @@ The following operations are defined for [URI]s:
|
|||
[list]: ../api/classes/divine_uri.URI.md#list
|
||||
`,
|
||||
}),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"brokenMarkdownLinks": Array [],
|
||||
"newContent": "
|
||||
The following operations are defined for [URI]s:
|
||||
|
||||
* [info]: Returns metadata about the resource,
|
||||
* [list]: Returns metadata about the resource's children (like getting the content of a local directory).
|
||||
|
||||
[URI]: /docs/api/classes/uri
|
||||
[info]: /docs/api/classes/uri#info
|
||||
[list]: /docs/api/classes/uri#list
|
||||
",
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
// TODO bad
|
||||
test('links in HTML comments', () => {
|
||||
it('ignores links in HTML comments', () => {
|
||||
expect(
|
||||
replaceMarkdownLinks({
|
||||
siteDir: '.',
|
||||
|
@ -125,37 +87,10 @@ The following operations are defined for [URI]s:
|
|||
-->
|
||||
`,
|
||||
}),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"brokenMarkdownLinks": Array [
|
||||
Object {
|
||||
"contentPaths": Object {
|
||||
"contentPath": "docs",
|
||||
"contentPathLocalized": "i18n/docs-localized",
|
||||
},
|
||||
"filePath": "docs/intro.md",
|
||||
"link": "./foo.md",
|
||||
},
|
||||
Object {
|
||||
"contentPaths": Object {
|
||||
"contentPath": "docs",
|
||||
"contentPathLocalized": "i18n/docs-localized",
|
||||
},
|
||||
"filePath": "docs/intro.md",
|
||||
"link": "./foo.md",
|
||||
},
|
||||
],
|
||||
"newContent": "
|
||||
<!-- [foo](./foo.md) -->
|
||||
<!--
|
||||
[foo](./foo.md)
|
||||
-->
|
||||
",
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('links in fenced blocks', () => {
|
||||
it('ignores links in fenced blocks', () => {
|
||||
expect(
|
||||
replaceMarkdownLinks({
|
||||
siteDir: '.',
|
||||
|
@ -187,34 +122,11 @@ The following operations are defined for [URI]s:
|
|||
\`\`\`\`
|
||||
`,
|
||||
}),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"brokenMarkdownLinks": Array [],
|
||||
"newContent": "
|
||||
\`\`\`
|
||||
[foo](foo.md)
|
||||
\`\`\`
|
||||
|
||||
\`\`\`\`js
|
||||
[foo](foo.md)
|
||||
\`\`\`
|
||||
[foo](foo.md)
|
||||
\`\`\`
|
||||
[foo](foo.md)
|
||||
\`\`\`\`
|
||||
|
||||
\`\`\`\`js
|
||||
[foo](foo.md)
|
||||
\`\`\`
|
||||
[foo](foo.md)
|
||||
\`\`\`\`
|
||||
",
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
// TODO bad
|
||||
test('links in inline code', () => {
|
||||
it('ignores links in inline code', () => {
|
||||
expect(
|
||||
replaceMarkdownLinks({
|
||||
siteDir: '.',
|
||||
|
@ -230,27 +142,11 @@ The following operations are defined for [URI]s:
|
|||
\`[foo](foo.md)\`
|
||||
`,
|
||||
}),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"brokenMarkdownLinks": Array [
|
||||
Object {
|
||||
"contentPaths": Object {
|
||||
"contentPath": "docs",
|
||||
"contentPathLocalized": "i18n/docs-localized",
|
||||
},
|
||||
"filePath": "docs/intro.md",
|
||||
"link": "foo.md",
|
||||
},
|
||||
],
|
||||
"newContent": "
|
||||
\`[foo](foo.md)\`
|
||||
",
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
// TODO bad
|
||||
test('links with same title as URL', () => {
|
||||
it('replaces links with same title as URL', () => {
|
||||
expect(
|
||||
replaceMarkdownLinks({
|
||||
siteDir: '.',
|
||||
|
@ -270,20 +166,10 @@ The following operations are defined for [URI]s:
|
|||
[./foo.md](foo.md)
|
||||
`,
|
||||
}),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"brokenMarkdownLinks": Array [],
|
||||
"newContent": "
|
||||
[/docs/foo](foo.md)
|
||||
[/docs/foo](./foo.md)
|
||||
[foo.md](/docs/foo)
|
||||
[.//docs/foo](foo.md)
|
||||
",
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('multiple links on same line', () => {
|
||||
it('replaces multiple links on same line', () => {
|
||||
expect(
|
||||
replaceMarkdownLinks({
|
||||
siteDir: '.',
|
||||
|
@ -302,13 +188,6 @@ The following operations are defined for [URI]s:
|
|||
[a](a.md), [a](a.md), [b](b.md), [c](c.md)
|
||||
`,
|
||||
}),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"brokenMarkdownLinks": Array [],
|
||||
"newContent": "
|
||||
[a](/docs/a), [a](/docs/a), [b](/docs/b), [c](/docs/c)
|
||||
",
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -14,7 +14,7 @@ import {
|
|||
import dedent from 'dedent';
|
||||
|
||||
describe('createExcerpt', () => {
|
||||
test('should create excerpt for text-only content', () => {
|
||||
it('creates excerpt for text-only content', () => {
|
||||
expect(
|
||||
createExcerpt(dedent`
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum ex urna, molestie et sagittis ut, varius ac justo.
|
||||
|
@ -26,7 +26,7 @@ describe('createExcerpt', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should create excerpt for regular content with regular title', () => {
|
||||
it('creates excerpt for regular content with regular title', () => {
|
||||
expect(
|
||||
createExcerpt(dedent`
|
||||
|
||||
|
@ -43,7 +43,7 @@ describe('createExcerpt', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should create excerpt for regular content with alternate title', () => {
|
||||
it('creates excerpt for regular content with alternate title', () => {
|
||||
expect(
|
||||
createExcerpt(dedent`
|
||||
|
||||
|
@ -61,7 +61,7 @@ describe('createExcerpt', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should create excerpt for content with h2 heading', () => {
|
||||
it('creates excerpt for content with h2 heading', () => {
|
||||
expect(
|
||||
createExcerpt(dedent`
|
||||
## Lorem ipsum dolor sit amet
|
||||
|
@ -71,7 +71,7 @@ describe('createExcerpt', () => {
|
|||
).toEqual('Lorem ipsum dolor sit amet');
|
||||
});
|
||||
|
||||
test('should create excerpt for content beginning with blockquote', () => {
|
||||
it('creates excerpt for content beginning with blockquote', () => {
|
||||
expect(
|
||||
createExcerpt(dedent`
|
||||
> Lorem ipsum dolor sit amet
|
||||
|
@ -81,7 +81,7 @@ describe('createExcerpt', () => {
|
|||
).toEqual('Lorem ipsum dolor sit amet');
|
||||
});
|
||||
|
||||
test('should create excerpt for content beginning with image (eg. blog post)', () => {
|
||||
it('creates excerpt for content beginning with image (eg. blog post)', () => {
|
||||
expect(
|
||||
createExcerpt(dedent`
|
||||

|
||||
|
@ -89,7 +89,7 @@ describe('createExcerpt', () => {
|
|||
).toEqual('Lorem ipsum');
|
||||
});
|
||||
|
||||
test('should create excerpt for content beginning with admonitions', () => {
|
||||
it('creates excerpt for content beginning with admonitions', () => {
|
||||
expect(
|
||||
createExcerpt(dedent`
|
||||
import Component from '@site/src/components/Component'
|
||||
|
@ -105,7 +105,7 @@ describe('createExcerpt', () => {
|
|||
).toEqual('Lorem ipsum dolor sit amet, consectetur adipiscing elit.');
|
||||
});
|
||||
|
||||
test('should create excerpt for content with imports/exports declarations and Markdown markup, as well as Emoji', () => {
|
||||
it('creates excerpt for content with imports/exports declarations and Markdown markup, as well as Emoji', () => {
|
||||
expect(
|
||||
createExcerpt(dedent`
|
||||
import Component from '@site/src/components/Component';
|
||||
|
@ -125,7 +125,7 @@ describe('createExcerpt', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should create excerpt for heading specified with anchor-id syntax', () => {
|
||||
it('creates excerpt for heading specified with anchor-id syntax', () => {
|
||||
expect(
|
||||
createExcerpt(dedent`
|
||||
## Markdown title {#my-anchor-id}
|
||||
|
@ -133,7 +133,7 @@ describe('createExcerpt', () => {
|
|||
).toEqual('Markdown title');
|
||||
});
|
||||
|
||||
test('should create excerpt for content with various code blocks', () => {
|
||||
it('creates excerpt for content with various code blocks', () => {
|
||||
expect(
|
||||
createExcerpt(dedent`
|
||||
\`\`\`jsx
|
||||
|
@ -148,7 +148,7 @@ describe('createExcerpt', () => {
|
|||
});
|
||||
|
||||
describe('parseMarkdownContentTitle', () => {
|
||||
test('Should parse markdown h1 title at the top', () => {
|
||||
it('parses markdown h1 title at the top', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
# Markdown Title
|
||||
|
@ -162,7 +162,7 @@ describe('parseMarkdownContentTitle', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 title at the top and remove it', () => {
|
||||
it('parses markdown h1 title at the top and remove it', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
# Markdown Title
|
||||
|
@ -178,7 +178,7 @@ describe('parseMarkdownContentTitle', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 title at the top and unwrap inline code block', () => {
|
||||
it('parses markdown h1 title at the top and unwrap inline code block', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
# \`Markdown Title\`
|
||||
|
@ -192,7 +192,7 @@ describe('parseMarkdownContentTitle', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 title and trim content', () => {
|
||||
it('parses markdown h1 title and trim content', () => {
|
||||
const markdown = `
|
||||
|
||||
# Markdown Title
|
||||
|
@ -209,7 +209,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse not parse markdown h1 title and trim content', () => {
|
||||
it('parses not parse markdown h1 title and trim content', () => {
|
||||
const markdown = `
|
||||
|
||||
Lorem Ipsum
|
||||
|
@ -222,7 +222,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 title with fixed anchor-id syntax', () => {
|
||||
it('parses markdown h1 title with fixed anchor-id syntax', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
# Markdown Title {#my-anchor-id}
|
||||
|
@ -236,7 +236,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 title at the top (atx style with closing #)', () => {
|
||||
it('parses markdown h1 title at the top (atx style with closing #)', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
# Markdown Title #
|
||||
|
@ -250,7 +250,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 title at the top followed by h2 title', () => {
|
||||
it('parses markdown h1 title at the top followed by h2 title', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
# Markdown Title
|
||||
|
@ -266,7 +266,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse only first h1 title', () => {
|
||||
it('parses only first h1 title', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
# Markdown Title
|
||||
|
@ -282,7 +282,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should not parse title that is not at the top', () => {
|
||||
it('does not parse title that is not at the top', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
Lorem Ipsum
|
||||
|
@ -298,7 +298,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 alternate title', () => {
|
||||
it('parses markdown h1 alternate title', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
Markdown Title
|
||||
|
@ -313,7 +313,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 alternate title and remove it', () => {
|
||||
it('parses markdown h1 alternate title and remove it', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
Markdown Title
|
||||
|
@ -330,7 +330,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 title placed after import declarations', () => {
|
||||
it('parses markdown h1 title placed after import declarations', () => {
|
||||
const markdown = dedent`
|
||||
import Component1 from '@site/src/components/Component1';
|
||||
|
||||
|
@ -351,7 +351,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 title placed after various import declarations', () => {
|
||||
it('parses markdown h1 title placed after various import declarations', () => {
|
||||
const markdown = `
|
||||
import DefaultComponent from '@site/src/components/Component1';
|
||||
import DefaultComponent2 from '../relative/path/Component2';
|
||||
|
@ -379,7 +379,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 title placed after various import declarations and remove it', () => {
|
||||
it('parses markdown h1 title placed after various import declarations and remove it', () => {
|
||||
const markdown = `
|
||||
import DefaultComponent from '@site/src/components/Component1';
|
||||
import DefaultComponent2 from '../relative/path/Component2';
|
||||
|
@ -409,7 +409,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 alternate title placed after import declarations', () => {
|
||||
it('parses markdown h1 alternate title placed after import declarations', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
import Component from '@site/src/components/Component';
|
||||
|
@ -428,7 +428,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 alternate title placed after import declarations and remove it', () => {
|
||||
it('parses markdown h1 alternate title placed after import declarations and remove it', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
import Component from '@site/src/components/Component';
|
||||
|
@ -449,7 +449,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse title-only', () => {
|
||||
it('parses title-only', () => {
|
||||
const markdown = '# Document With Only A Title';
|
||||
expect(parseMarkdownContentTitle(markdown)).toEqual({
|
||||
content: markdown,
|
||||
|
@ -457,7 +457,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should not parse markdown h1 title in the middle of a doc', () => {
|
||||
it('does not parse markdown h1 title in the middle of a doc', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
Lorem Ipsum
|
||||
|
@ -473,7 +473,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should not parse markdown h1 alternate title in the middle of the doc', () => {
|
||||
it('does not parse markdown h1 alternate title in the middle of the doc', () => {
|
||||
const markdown = dedent`
|
||||
|
||||
Lorem Ipsum
|
||||
|
@ -490,7 +490,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 title placed after multiple import declarations', () => {
|
||||
it('parses markdown h1 title placed after multiple import declarations', () => {
|
||||
const markdown = dedent`
|
||||
import Component1 from '@site/src/components/Component1';
|
||||
import Component2 from '@site/src/components/Component2';
|
||||
|
@ -520,7 +520,7 @@ Lorem Ipsum
|
|||
});
|
||||
});
|
||||
|
||||
test('Should parse markdown h1 title placed after multiple import declarations and remove it', () => {
|
||||
it('parses markdown h1 title placed after multiple import declarations and remove it', () => {
|
||||
const markdown = dedent`
|
||||
import Component1 from '@site/src/components/Component1';
|
||||
import Component2 from '@site/src/components/Component2';
|
||||
|
@ -554,7 +554,7 @@ Lorem Ipsum
|
|||
});
|
||||
|
||||
describe('parseMarkdownString', () => {
|
||||
test('parse markdown with front matter', () => {
|
||||
it('parse markdown with front matter', () => {
|
||||
expect(
|
||||
parseMarkdownString(dedent`
|
||||
---
|
||||
|
@ -563,38 +563,20 @@ describe('parseMarkdownString', () => {
|
|||
|
||||
Some text
|
||||
`),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content": "Some text",
|
||||
"contentTitle": undefined,
|
||||
"excerpt": "Some text",
|
||||
"frontMatter": Object {
|
||||
"title": "Frontmatter title",
|
||||
},
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should parse first heading as contentTitle', () => {
|
||||
it('parses first heading as contentTitle', () => {
|
||||
expect(
|
||||
parseMarkdownString(dedent`
|
||||
# Markdown Title
|
||||
|
||||
Some text
|
||||
`),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content": "# Markdown Title
|
||||
|
||||
Some text",
|
||||
"contentTitle": "Markdown Title",
|
||||
"excerpt": "Some text",
|
||||
"frontMatter": Object {},
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should warn about duplicate titles (front matter + markdown)', () => {
|
||||
it('warns about duplicate titles (front matter + markdown)', () => {
|
||||
expect(
|
||||
parseMarkdownString(dedent`
|
||||
---
|
||||
|
@ -605,21 +587,10 @@ describe('parseMarkdownString', () => {
|
|||
|
||||
Some text
|
||||
`),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content": "# Markdown Title
|
||||
|
||||
Some text",
|
||||
"contentTitle": "Markdown Title",
|
||||
"excerpt": "Some text",
|
||||
"frontMatter": Object {
|
||||
"title": "Frontmatter title",
|
||||
},
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should warn about duplicate titles (front matter + markdown alternate)', () => {
|
||||
it('warns about duplicate titles (front matter + markdown alternate)', () => {
|
||||
expect(
|
||||
parseMarkdownString(dedent`
|
||||
---
|
||||
|
@ -631,22 +602,10 @@ describe('parseMarkdownString', () => {
|
|||
|
||||
Some text
|
||||
`),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content": "Markdown Title alternate
|
||||
================
|
||||
|
||||
Some text",
|
||||
"contentTitle": "Markdown Title alternate",
|
||||
"excerpt": "Some text",
|
||||
"frontMatter": Object {
|
||||
"title": "Frontmatter title",
|
||||
},
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should not warn for duplicate title if markdown title is not at the top', () => {
|
||||
it('does not warn for duplicate title if markdown title is not at the top', () => {
|
||||
expect(
|
||||
parseMarkdownString(dedent`
|
||||
---
|
||||
|
@ -657,21 +616,10 @@ describe('parseMarkdownString', () => {
|
|||
|
||||
# Markdown Title
|
||||
`),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content": "foo
|
||||
|
||||
# Markdown Title",
|
||||
"contentTitle": undefined,
|
||||
"excerpt": "foo",
|
||||
"frontMatter": Object {
|
||||
"title": "Frontmatter title",
|
||||
},
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should delete only first heading', () => {
|
||||
it('deletes only first heading', () => {
|
||||
expect(
|
||||
parseMarkdownString(dedent`
|
||||
# Markdown Title
|
||||
|
@ -682,23 +630,10 @@ describe('parseMarkdownString', () => {
|
|||
|
||||
### Markdown Title h3
|
||||
`),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content": "# Markdown Title
|
||||
|
||||
test test test # test bar
|
||||
|
||||
# Markdown Title 2
|
||||
|
||||
### Markdown Title h3",
|
||||
"contentTitle": "Markdown Title",
|
||||
"excerpt": "test test test # test bar",
|
||||
"frontMatter": Object {},
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should parse front-matter and ignore h2', () => {
|
||||
it('parses front-matter and ignore h2', () => {
|
||||
expect(
|
||||
parseMarkdownString(
|
||||
dedent`
|
||||
|
@ -708,66 +643,33 @@ describe('parseMarkdownString', () => {
|
|||
## test
|
||||
`,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content": "## test",
|
||||
"contentTitle": undefined,
|
||||
"excerpt": "test",
|
||||
"frontMatter": Object {
|
||||
"title": "Frontmatter title",
|
||||
},
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should read front matter only', () => {
|
||||
it('reads front matter only', () => {
|
||||
expect(
|
||||
parseMarkdownString(dedent`
|
||||
---
|
||||
title: test
|
||||
---
|
||||
`),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content": "",
|
||||
"contentTitle": undefined,
|
||||
"excerpt": undefined,
|
||||
"frontMatter": Object {
|
||||
"title": "test",
|
||||
},
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should parse title only', () => {
|
||||
expect(parseMarkdownString('# test')).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content": "# test",
|
||||
"contentTitle": "test",
|
||||
"excerpt": undefined,
|
||||
"frontMatter": Object {},
|
||||
}
|
||||
`);
|
||||
it('parses title only', () => {
|
||||
expect(parseMarkdownString('# test')).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should parse title only alternate', () => {
|
||||
it('parses title only alternate', () => {
|
||||
expect(
|
||||
parseMarkdownString(dedent`
|
||||
test
|
||||
===
|
||||
`),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content": "test
|
||||
===",
|
||||
"contentTitle": "test",
|
||||
"excerpt": undefined,
|
||||
"frontMatter": Object {},
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should warn about duplicate titles', () => {
|
||||
it('warns about duplicate titles', () => {
|
||||
expect(
|
||||
parseMarkdownString(dedent`
|
||||
---
|
||||
|
@ -775,36 +677,19 @@ describe('parseMarkdownString', () => {
|
|||
---
|
||||
# test
|
||||
`),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content": "# test",
|
||||
"contentTitle": "test",
|
||||
"excerpt": undefined,
|
||||
"frontMatter": Object {
|
||||
"title": "Frontmatter title",
|
||||
},
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should ignore markdown title if its not a first text', () => {
|
||||
it('ignores markdown title if its not a first text', () => {
|
||||
expect(
|
||||
parseMarkdownString(dedent`
|
||||
foo
|
||||
# test
|
||||
`),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content": "foo
|
||||
# test",
|
||||
"contentTitle": undefined,
|
||||
"excerpt": "foo",
|
||||
"frontMatter": Object {},
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should delete only first heading 2', () => {
|
||||
it('deletes only first heading 2', () => {
|
||||
expect(
|
parseMarkdownString(dedent`
# test
@@ -815,23 +700,10 @@ describe('parseMarkdownString', () => {
### test
test3
`),
).toMatchInlineSnapshot(`
Object {
"content": "# test

test test test test test test
test test test # test bar
# test2
### test
test3",
"contentTitle": "test",
"excerpt": "test test test test test test",
"frontMatter": Object {},
}
`);
).toMatchSnapshot();
});

test('should handle code blocks', () => {
it('handles code blocks', () => {
expect(
parseMarkdownString(dedent`
\`\`\`js
@@ -840,18 +712,7 @@ describe('parseMarkdownString', () => {

Content
`),
).toMatchInlineSnapshot(`
Object {
"content": "\`\`\`js
code
\`\`\`

Content",
"contentTitle": undefined,
"excerpt": "Content",
"frontMatter": Object {},
}
`);
).toMatchSnapshot();
expect(
parseMarkdownString(dedent`
\`\`\`\`js
@@ -864,22 +725,7 @@ describe('parseMarkdownString', () => {

Content
`),
).toMatchInlineSnapshot(`
Object {
"content": "\`\`\`\`js
Foo
\`\`\`diff
code
\`\`\`
Bar
\`\`\`\`

Content",
"contentTitle": undefined,
"excerpt": "Content",
"frontMatter": Object {},
}
`);
).toMatchSnapshot();
expect(
parseMarkdownString(dedent`
\`\`\`\`js
@@ -890,23 +736,10 @@ describe('parseMarkdownString', () => {

Content
`),
).toMatchInlineSnapshot(`
Object {
"content": "\`\`\`\`js
Foo
\`\`\`diff
code
\`\`\`\`

Content",
"contentTitle": undefined,
"excerpt": "Content",
"frontMatter": Object {},
}
`);
).toMatchSnapshot();
});

test('throws for invalid front matter', () => {
it('throws for invalid front matter', () => {
expect(() =>
parseMarkdownString(dedent`
---
@@ -922,35 +755,35 @@ describe('parseMarkdownString', () => {
});

describe('parseMarkdownHeadingId', () => {
test('can parse simple heading without id', () => {
it('can parse simple heading without id', () => {
expect(parseMarkdownHeadingId('## Some heading')).toEqual({
text: '## Some heading',
id: undefined,
});
});

test('can parse simple heading with id', () => {
it('can parse simple heading with id', () => {
expect(parseMarkdownHeadingId('## Some heading {#custom-_id}')).toEqual({
text: '## Some heading',
id: 'custom-_id',
});
});

test('can parse heading not ending with the id', () => {
it('can parse heading not ending with the id', () => {
expect(parseMarkdownHeadingId('## {#custom-_id} Some heading')).toEqual({
text: '## {#custom-_id} Some heading',
id: undefined,
});
});

test('can parse heading with multiple id', () => {
it('can parse heading with multiple id', () => {
expect(parseMarkdownHeadingId('## Some heading {#id1} {#id2}')).toEqual({
text: '## Some heading {#id1}',
id: 'id2',
});
});

test('can parse heading with link and id', () => {
it('can parse heading with link and id', () => {
expect(
parseMarkdownHeadingId(
'## Some heading [facebook](https://facebook.com) {#id}',
@@ -961,7 +794,7 @@ describe('parseMarkdownHeadingId', () => {
});
});

test('can parse heading with only id', () => {
it('can parse heading with only id', () => {
expect(parseMarkdownHeadingId('## {#id}')).toEqual({
text: '##',
id: 'id',

@@ -5,6 +5,7 @@
* LICENSE file in the root directory of this source tree.
*/

import {jest} from '@jest/globals';
import {
isNameTooLong,
shortName,
@@ -17,7 +18,7 @@ import {
import path from 'path';

describe('isNameTooLong', () => {
test('behaves correctly', () => {
it('works', () => {
const asserts = {
'': false,
'foo-bar-096': false,
@@ -58,7 +59,7 @@ describe('isNameTooLong', () => {
});

describe('shortName', () => {
test('works', () => {
it('works', () => {
const asserts = {
'': '',
'foo-bar': 'foo-bar',
@@ -104,76 +105,87 @@ describe('shortName', () => {
const VERY_LONG_PATH = `/${`x`.repeat(256)}/`;
const VERY_LONG_PATH_NON_LATIN = `/${`あ`.repeat(255)}/`;

test('Truncates long paths correctly', () => {
it('truncates long paths correctly', () => {
const truncatedPathLatin = shortName(VERY_LONG_PATH);
const truncatedPathNonLatin = shortName(VERY_LONG_PATH_NON_LATIN);
expect(truncatedPathLatin.length).toBeLessThanOrEqual(255);
expect(truncatedPathNonLatin.length).toBeLessThanOrEqual(255);
});

test('Does not truncate short paths', () => {
it('does not truncate short paths', () => {
const truncatedPath = shortName(SHORT_PATH);
expect(truncatedPath).toEqual(SHORT_PATH);
});
});

test('toMessageRelativeFilePath', () => {
jest
.spyOn(process, 'cwd')
.mockImplementationOnce(() => path.join(__dirname, '..'));
expect(toMessageRelativeFilePath(path.join(__dirname, 'foo/bar.js'))).toEqual(
'__tests__/foo/bar.js',
);
});

test('escapePath', () => {
const asserts: Record<string, string> = {
'c:/aaaa\\bbbb': 'c:/aaaa\\\\bbbb',
'c:\\aaaa\\bbbb\\★': 'c:\\\\aaaa\\\\bbbb\\\\★',
'\\\\?\\c:\\aaaa\\bbbb': '\\\\\\\\?\\\\c:\\\\aaaa\\\\bbbb',
'c:\\aaaa\\bbbb': 'c:\\\\aaaa\\\\bbbb',
'foo\\bar': 'foo\\\\bar',
'foo\\bar/lol': 'foo\\\\bar/lol',
'website\\docs/**/*.{md,mdx}': 'website\\\\docs/**/*.{md,mdx}',
};
Object.keys(asserts).forEach((file) => {
expect(escapePath(file)).toBe(asserts[file]);
describe('toMessageRelativeFilePath', () => {
it('works', () => {
jest
.spyOn(process, 'cwd')
.mockImplementationOnce(() => path.join(__dirname, '..'));
expect(
toMessageRelativeFilePath(path.join(__dirname, 'foo/bar.js')),
).toEqual('__tests__/foo/bar.js');
});
});

test('posixPath', () => {
const asserts: Record<string, string> = {
'c:/aaaa\\bbbb': 'c:/aaaa/bbbb',
'c:\\aaaa\\bbbb\\★': 'c:\\aaaa\\bbbb\\★',
'\\\\?\\c:\\aaaa\\bbbb': '\\\\?\\c:\\aaaa\\bbbb',
'c:\\aaaa\\bbbb': 'c:/aaaa/bbbb',
'foo\\bar': 'foo/bar',
'foo\\bar/lol': 'foo/bar/lol',
'website\\docs/**/*.{md,mdx}': 'website/docs/**/*.{md,mdx}',
};
Object.keys(asserts).forEach((file) => {
expect(posixPath(file)).toBe(asserts[file]);
describe('escapePath', () => {
it('works', () => {
const asserts: Record<string, string> = {
'c:/aaaa\\bbbb': 'c:/aaaa\\\\bbbb',
'c:\\aaaa\\bbbb\\★': 'c:\\\\aaaa\\\\bbbb\\\\★',
'\\\\?\\c:\\aaaa\\bbbb': '\\\\\\\\?\\\\c:\\\\aaaa\\\\bbbb',
'c:\\aaaa\\bbbb': 'c:\\\\aaaa\\\\bbbb',
'foo\\bar': 'foo\\\\bar',
'foo\\bar/lol': 'foo\\\\bar/lol',
'website\\docs/**/*.{md,mdx}': 'website\\\\docs/**/*.{md,mdx}',
};
Object.keys(asserts).forEach((file) => {
expect(escapePath(file)).toBe(asserts[file]);
});
});
});

test('aliasedSitePath', () => {
const asserts: Record<string, string> = {
'user/website/docs/asd.md': '@site/docs/asd.md',
'user/website/versioned_docs/foo/bar.md': '@site/versioned_docs/foo/bar.md',
'user/docs/test.md': '@site/../docs/test.md',
};
Object.keys(asserts).forEach((file) => {
expect(posixPath(aliasedSitePath(file, 'user/website'))).toBe(
asserts[file],
describe('posixPath', () => {
it('works', () => {
const asserts: Record<string, string> = {
'c:/aaaa\\bbbb': 'c:/aaaa/bbbb',
'c:\\aaaa\\bbbb\\★': 'c:\\aaaa\\bbbb\\★',
'\\\\?\\c:\\aaaa\\bbbb': '\\\\?\\c:\\aaaa\\bbbb',
'c:\\aaaa\\bbbb': 'c:/aaaa/bbbb',
'foo\\bar': 'foo/bar',
'foo\\bar/lol': 'foo/bar/lol',
'website\\docs/**/*.{md,mdx}': 'website/docs/**/*.{md,mdx}',
};
Object.keys(asserts).forEach((file) => {
expect(posixPath(file)).toBe(asserts[file]);
});
});
});

describe('aliasedSitePath', () => {
it('works', () => {
const asserts: Record<string, string> = {
'user/website/docs/asd.md': '@site/docs/asd.md',
'user/website/versioned_docs/foo/bar.md':
'@site/versioned_docs/foo/bar.md',
'user/docs/test.md': '@site/../docs/test.md',
};
Object.keys(asserts).forEach((file) => {
expect(posixPath(aliasedSitePath(file, 'user/website'))).toBe(
asserts[file],
);
});
});
});

describe('addTrailingPathSeparator', () => {
it('works', () => {
expect(addTrailingPathSeparator('foo')).toEqual(
process.platform === 'win32' ? 'foo\\' : 'foo/',
);
expect(addTrailingPathSeparator('foo/')).toEqual(
process.platform === 'win32' ? 'foo\\' : 'foo/',
);
});
});

test('addTrailingPathSeparator', () => {
expect(addTrailingPathSeparator('foo')).toEqual(
process.platform === 'win32' ? 'foo\\' : 'foo/',
);
expect(addTrailingPathSeparator('foo/')).toEqual(
process.platform === 'win32' ? 'foo\\' : 'foo/',
);
});

@@ -8,7 +8,7 @@
import {createSlugger} from '../slugger';

describe('createSlugger', () => {
test('can create unique slugs', () => {
it('can create unique slugs', () => {
const slugger = createSlugger();
expect(slugger.slug('Some$/vaLue$!^')).toEqual('somevalue');
expect(slugger.slug('Some$/vaLue$!^')).toEqual('somevalue-1');
@@ -16,7 +16,7 @@ describe('createSlugger', () => {
expect(slugger.slug('Some$/vaLue$!^-1')).toEqual('somevalue-1-1');
});

test('can create unique slugs respecting case', () => {
it('can create unique slugs respecting case', () => {
const slugger = createSlugger();
const opt = {maintainCase: true};
expect(slugger.slug('Some$/vaLue$!^', opt)).toEqual('SomevaLue');

@@ -16,7 +16,7 @@ describe('normalizeFrontMatterTag', () => {
type Input = Parameters<typeof normalizeFrontMatterTag>[1];
type Output = ReturnType<typeof normalizeFrontMatterTag>;

test('should normalize simple string tag', () => {
it('normalizes simple string tag', () => {
const tagsPath = '/all/tags';
const input: Input = 'tag';
const expectedOutput: Output = {
@@ -26,7 +26,7 @@ describe('normalizeFrontMatterTag', () => {
expect(normalizeFrontMatterTag(tagsPath, input)).toEqual(expectedOutput);
});

test('should normalize complex string tag', () => {
it('normalizes complex string tag', () => {
const tagsPath = '/all/tags';
const input: Input = 'some more Complex_tag';
const expectedOutput: Output = {
@@ -36,7 +36,7 @@ describe('normalizeFrontMatterTag', () => {
expect(normalizeFrontMatterTag(tagsPath, input)).toEqual(expectedOutput);
});

test('should normalize simple object tag', () => {
it('normalizes simple object tag', () => {
const tagsPath = '/all/tags';
const input: Input = {label: 'tag', permalink: 'tagPermalink'};
const expectedOutput: Output = {
@@ -46,7 +46,7 @@ describe('normalizeFrontMatterTag', () => {
expect(normalizeFrontMatterTag(tagsPath, input)).toEqual(expectedOutput);
});

test('should normalize complex string tag with object tag', () => {
it('normalizes complex string tag with object tag', () => {
const tagsPath = '/all/tags';
const input: Input = {
label: 'tag complex Label',
@@ -64,7 +64,7 @@ describe('normalizeFrontMatterTags', () => {
type Input = Parameters<typeof normalizeFrontMatterTags>[1];
type Output = ReturnType<typeof normalizeFrontMatterTags>;

test('should normalize string list', () => {
it('normalizes string list', () => {
const tagsPath = '/all/tags';
const input: Input = ['tag 1', 'tag-1', 'tag 3', 'tag1', 'tag-2'];
// Keep user input order but remove tags that lead to same permalink
@@ -85,11 +85,11 @@ describe('normalizeFrontMatterTags', () => {
expect(normalizeFrontMatterTags(tagsPath, input)).toEqual(expectedOutput);
});

test('succeeds for empty list', () => {
it('succeeds for empty list', () => {
expect(normalizeFrontMatterTags('/foo')).toEqual([]);
});

test('should normalize complex mixed list', () => {
it('normalizes complex mixed list', () => {
const tagsPath = '/all/tags';
const input: Input = [
'tag 1',
@@ -131,7 +131,7 @@ describe('groupTaggedItems', () => {
type Input = Parameters<typeof groupItems>[0];
type Output = ReturnType<typeof groupItems>;

test('should group items by tag permalink', () => {
it('groups items by tag permalink', () => {
const tagGuide = {label: 'Guide', permalink: '/guide'};
const tagTutorial = {label: 'Tutorial', permalink: '/tutorial'};
const tagAPI = {label: 'API', permalink: '/api'};

@@ -18,7 +18,7 @@ import {
} from '../urlUtils';

describe('normalizeUrl', () => {
test('should normalize urls correctly', () => {
it('normalizes urls correctly', () => {
const asserts = [
{
input: ['/', ''],
@@ -143,7 +143,7 @@ describe('normalizeUrl', () => {
});

describe('getEditUrl', () => {
test('returns right path', () => {
it('returns right path', () => {
expect(
getEditUrl('foo/bar.md', 'https://github.com/facebook/docusaurus'),
).toEqual('https://github.com/facebook/docusaurus/foo/bar.md');
@@ -151,91 +151,99 @@ describe('getEditUrl', () => {
getEditUrl('foo/你好.md', 'https://github.com/facebook/docusaurus'),
).toEqual('https://github.com/facebook/docusaurus/foo/你好.md');
});
test('always returns valid URL', () => {
it('always returns valid URL', () => {
expect(
getEditUrl('foo\\你好.md', 'https://github.com/facebook/docusaurus'),
).toEqual('https://github.com/facebook/docusaurus/foo/你好.md');
});
test('returns undefined for undefined', () => {
it('returns undefined for undefined', () => {
expect(getEditUrl('foo/bar.md')).toBeUndefined();
});
});

test('fileToPath', () => {
const asserts: Record<string, string> = {
'index.md': '/',
'hello/index.md': '/hello/',
'foo.md': '/foo',
'foo/bar.md': '/foo/bar',
'index.js': '/',
'hello/index.js': '/hello/',
'foo.js': '/foo',
'foo/bar.js': '/foo/bar',
};
Object.keys(asserts).forEach((file) => {
expect(fileToPath(file)).toBe(asserts[file]);
describe('fileToPath', () => {
it('works', () => {
const asserts: Record<string, string> = {
'index.md': '/',
'hello/index.md': '/hello/',
'foo.md': '/foo',
'foo/bar.md': '/foo/bar',
'index.js': '/',
'hello/index.js': '/hello/',
'foo.js': '/foo',
'foo/bar.js': '/foo/bar',
};
Object.keys(asserts).forEach((file) => {
expect(fileToPath(file)).toBe(asserts[file]);
});
});
});

test('isValidPathname', () => {
expect(isValidPathname('/')).toBe(true);
expect(isValidPathname('/hey')).toBe(true);
expect(isValidPathname('/hey/ho')).toBe(true);
expect(isValidPathname('/hey/ho/')).toBe(true);
expect(isValidPathname('/hey/h%C3%B4/')).toBe(true);
expect(isValidPathname('/hey///ho///')).toBe(true); // Unexpected but valid
expect(isValidPathname('/hey/héllô you')).toBe(true);
describe('isValidPathname', () => {
it('works', () => {
expect(isValidPathname('/')).toBe(true);
expect(isValidPathname('/hey')).toBe(true);
expect(isValidPathname('/hey/ho')).toBe(true);
expect(isValidPathname('/hey/ho/')).toBe(true);
expect(isValidPathname('/hey/h%C3%B4/')).toBe(true);
expect(isValidPathname('/hey///ho///')).toBe(true); // Unexpected but valid
expect(isValidPathname('/hey/héllô you')).toBe(true);

expect(isValidPathname('')).toBe(false);
expect(isValidPathname('hey')).toBe(false);
expect(isValidPathname('/hey?qs=ho')).toBe(false);
expect(isValidPathname('https://fb.com/hey')).toBe(false);
expect(isValidPathname('//hey')).toBe(false);
expect(isValidPathname('////')).toBe(false);
expect(isValidPathname('')).toBe(false);
expect(isValidPathname('hey')).toBe(false);
expect(isValidPathname('/hey?qs=ho')).toBe(false);
expect(isValidPathname('https://fb.com/hey')).toBe(false);
expect(isValidPathname('//hey')).toBe(false);
expect(isValidPathname('////')).toBe(false);
});
});

describe('addTrailingSlash', () => {
test('should no-op', () => {
it('is no-op for path with trailing slash', () => {
expect(addTrailingSlash('/abcd/')).toEqual('/abcd/');
});
test('should add /', () => {
it('adds / for path without trailing slash', () => {
expect(addTrailingSlash('/abcd')).toEqual('/abcd/');
});
});

describe('addLeadingSlash', () => {
test('should no-op', () => {
it('is no-op for path with leading slash', () => {
expect(addLeadingSlash('/abc')).toEqual('/abc');
});
test('should add /', () => {
it('adds / for path without leading slash', () => {
expect(addLeadingSlash('abc')).toEqual('/abc');
});
});

describe('removeTrailingSlash', () => {
test('should no-op', () => {
it('is no-op for path without trailing slash', () => {
expect(removeTrailingSlash('/abcd')).toEqual('/abcd');
});
test('should remove /', () => {
it('removes / for path with trailing slash', () => {
expect(removeTrailingSlash('/abcd/')).toEqual('/abcd');
});
});

test('resolvePathname', () => {
// These tests are directly copied from https://github.com/mjackson/resolve-pathname/blob/master/modules/__tests__/resolvePathname-test.js
// Maybe we want to wrap that logic in the future?
expect(resolvePathname('c')).toEqual('c');
expect(resolvePathname('c', 'a/b')).toEqual('a/c');
expect(resolvePathname('/c', '/a/b')).toEqual('/c');
expect(resolvePathname('', '/a/b')).toEqual('/a/b');
expect(resolvePathname('../c', '/a/b')).toEqual('/c');
expect(resolvePathname('c', '/a/b')).toEqual('/a/c');
expect(resolvePathname('c', '/a/')).toEqual('/a/c');
expect(resolvePathname('..', '/a/b')).toEqual('/');
describe('resolvePathname', () => {
it('works', () => {
// These tests are directly copied from https://github.com/mjackson/resolve-pathname/blob/master/modules/__tests__/resolvePathname-test.js
// Maybe we want to wrap that logic in the future?
expect(resolvePathname('c')).toEqual('c');
expect(resolvePathname('c', 'a/b')).toEqual('a/c');
expect(resolvePathname('/c', '/a/b')).toEqual('/c');
expect(resolvePathname('', '/a/b')).toEqual('/a/b');
expect(resolvePathname('../c', '/a/b')).toEqual('/c');
expect(resolvePathname('c', '/a/b')).toEqual('/a/c');
expect(resolvePathname('c', '/a/')).toEqual('/a/c');
expect(resolvePathname('..', '/a/b')).toEqual('/');
});
});

test('encodePath', () => {
expect(encodePath('a/foo/')).toEqual('a/foo/');
expect(encodePath('a/<foo>/')).toEqual('a/%3Cfoo%3E/');
expect(encodePath('a/你好/')).toEqual('a/%E4%BD%A0%E5%A5%BD/');
describe('encodePath', () => {
it('works', () => {
expect(encodePath('a/foo/')).toEqual('a/foo/');
expect(encodePath('a/<foo>/')).toEqual('a/%3Cfoo%3E/');
expect(encodePath('a/你好/')).toEqual('a/%E4%BD%A0%E5%A5%BD/');
});
});

@@ -8,7 +8,7 @@
import {getFileLoaderUtils} from '../webpackUtils';

describe('getFileLoaderUtils()', () => {
test('plugin svgo/removeViewBox and removeTitle should be disabled', () => {
it('plugin svgo/removeViewBox and removeTitle should be disabled', () => {
const {oneOf} = getFileLoaderUtils().rules.svg();
expect(oneOf[0].use).toContainEqual(
expect.objectContaining({