Mirror of https://github.com/facebook/docusaurus.git (synced 2025-05-24 22:46:57 +02:00)

test: enable a few jest eslint rules (#6900)

* test: enable a few jest eslint rules
* more

Parent commit: 1efc6c6091
This commit: aa5a2d4c04
155 changed files with 3644 additions and 3478 deletions
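The hunks below consistently rename `test(...)` to `it(...)` inside `describe` blocks, wrap previously top-level tests in new `describe` blocks, and drop the "should ..." phrasing from test titles. Those are the kinds of changes enforced by eslint-plugin-jest rules such as `jest/consistent-test-it` and `jest/require-top-level-describe`. The exact rule set enabled by this PR is not visible on this page, so the following .eslintrc.js fragment is only an illustrative sketch, not the project's actual configuration:

// Hypothetical ESLint override for test files; the rule choices are assumptions
// inferred from the diff, not copied from the Docusaurus repository.
module.exports = {
  plugins: ['jest'],
  overrides: [
    {
      files: ['**/__tests__/**/*.{js,ts,tsx}'],
      rules: {
        // Use `test` at the top level and `it` inside `describe` blocks,
        // matching the test -> it renames in the hunks below.
        'jest/consistent-test-it': ['error', {fn: 'test', withinDescribe: 'it'}],
        // Require every test to be grouped under a top-level describe,
        // matching the new `describe('blog plugin', ...)` wrappers below.
        'jest/require-top-level-describe': 'error',
      },
    },
  ],
};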
@@ -1,6 +1,6 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP

-exports[`report broken markdown links 1`] = `
+exports[`linkify reports broken markdown links 1`] = `
 "---
 title: This post links to another one!
 ---
@@ -15,7 +15,7 @@ title: This post links to another one!
 "
 `;

-exports[`transform to correct link 1`] = `
+exports[`linkify transforms to correct link 1`] = `
 "---
 title: This post links to another one!
 ---
@@ -1,6 +1,6 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP

-exports[`blogFeed atom shows feed item for each post 1`] = `
+exports[`atom has feed item for each post 1`] = `
 Array [
   "<?xml version=\\"1.0\\" encoding=\\"utf-8\\"?>
 <feed xmlns=\\"http://www.w3.org/2005/Atom\\">
@@ -84,7 +84,7 @@ Array [
 ]
 `;

-exports[`blogFeed json shows feed item for each post 1`] = `
+exports[`json has feed item for each post 1`] = `
 Array [
   "{
   \\"version\\": \\"https://jsonfeed.org/version/1\\",
@@ -171,7 +171,7 @@ Array [
 ]
 `;

-exports[`blogFeed rss shows feed item for each post 1`] = `
+exports[`rss has feed item for each post 1`] = `
 Array [
   "<?xml version=\\"1.0\\" encoding=\\"utf-8\\"?>
 <rss version=\\"2.0\\" xmlns:dc=\\"http://purl.org/dc/elements/1.1/\\" xmlns:content=\\"http://purl.org/rss/1.0/modules/content/\\">
@@ -1,6 +1,67 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP

-exports[`loadBlog test blog tags 1`] = `
+exports[`blog plugin works on blog tags without pagination 1`] = `
+Object {
+  "/blog/tags/tag-1": Object {
+    "items": Array [
+      "/simple/slug/another",
+      "/another/tags",
+      "/another/tags2",
+    ],
+    "name": "tag1",
+    "pages": Array [
+      Object {
+        "items": Array [
+          "/simple/slug/another",
+          "/another/tags",
+          "/another/tags2",
+        ],
+        "metadata": Object {
+          "blogDescription": "Blog",
+          "blogTitle": "Blog",
+          "nextPage": null,
+          "page": 1,
+          "permalink": "/blog/tags/tag-1",
+          "postsPerPage": 3,
+          "previousPage": null,
+          "totalCount": 3,
+          "totalPages": 1,
+        },
+      },
+    ],
+    "permalink": "/blog/tags/tag-1",
+  },
+  "/blog/tags/tag-2": Object {
+    "items": Array [
+      "/another/tags",
+      "/another/tags2",
+    ],
+    "name": "tag2",
+    "pages": Array [
+      Object {
+        "items": Array [
+          "/another/tags",
+          "/another/tags2",
+        ],
+        "metadata": Object {
+          "blogDescription": "Blog",
+          "blogTitle": "Blog",
+          "nextPage": null,
+          "page": 1,
+          "permalink": "/blog/tags/tag-2",
+          "postsPerPage": 2,
+          "previousPage": null,
+          "totalCount": 2,
+          "totalPages": 1,
+        },
+      },
+    ],
+    "permalink": "/blog/tags/tag-2",
+  },
+}
+`;
+
+exports[`blog plugin works with blog tags 1`] = `
 Object {
   "/blog/tags/tag-1": Object {
     "items": Array [
@@ -75,64 +136,3 @@ Object {
   },
 }
 `;
-
-exports[`loadBlog test blog tags: no pagination 1`] = `
-Object {
-  "/blog/tags/tag-1": Object {
-    "items": Array [
-      "/simple/slug/another",
-      "/another/tags",
-      "/another/tags2",
-    ],
-    "name": "tag1",
-    "pages": Array [
-      Object {
-        "items": Array [
-          "/simple/slug/another",
-          "/another/tags",
-          "/another/tags2",
-        ],
-        "metadata": Object {
-          "blogDescription": "Blog",
-          "blogTitle": "Blog",
-          "nextPage": null,
-          "page": 1,
-          "permalink": "/blog/tags/tag-1",
-          "postsPerPage": 3,
-          "previousPage": null,
-          "totalCount": 3,
-          "totalPages": 1,
-        },
-      },
-    ],
-    "permalink": "/blog/tags/tag-1",
-  },
-  "/blog/tags/tag-2": Object {
-    "items": Array [
-      "/another/tags",
-      "/another/tags2",
-    ],
-    "name": "tag2",
-    "pages": Array [
-      Object {
-        "items": Array [
-          "/another/tags",
-          "/another/tags2",
-        ],
-        "metadata": Object {
-          "blogDescription": "Blog",
-          "blogTitle": "Blog",
-          "nextPage": null,
-          "page": 1,
-          "permalink": "/blog/tags/tag-2",
-          "postsPerPage": 2,
-          "previousPage": null,
-          "totalCount": 2,
-          "totalPages": 1,
-        },
-      },
-    ],
-    "permalink": "/blog/tags/tag-2",
-  },
-}
-`;
@@ -1,5 +1,5 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP

-exports[`should throw Error in case of invalid feedtype 1`] = `[ValidationError: "feedOptions.type" does not match any of the allowed types]`;
+exports[`blog plugin options schema throws Error in case of invalid feedtype 1`] = `[ValidationError: "feedOptions.type" does not match any of the allowed types]`;

-exports[`should throw Error in case of invalid options 1`] = `[ValidationError: "postsPerPage" must be greater than or equal to 1]`;
+exports[`blog plugin options schema throws Error in case of invalid options 1`] = `[ValidationError: "postsPerPage" must be greater than or equal to 1]`;
@@ -1,6 +1,6 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP

-exports[`getContentTranslationFiles should return translation files matching snapshot 1`] = `
+exports[`getContentTranslationFiles returns translation files matching snapshot 1`] = `
 Array [
   Object {
     "content": Object {
@@ -22,7 +22,7 @@ Array [
 ]
 `;

-exports[`translateContent should fallback when translation is incomplete 1`] = `
+exports[`translateContent falls back when translation is incomplete 1`] = `
 Object {
   "blogListPaginated": Array [
     Object {
@@ -63,7 +63,7 @@ Object {
 }
 `;

-exports[`translateContent should return translated loaded content matching snapshot 1`] = `
+exports[`translateContent returns translated loaded 1`] = `
 Object {
   "blogListPaginated": Array [
     Object {
@ -14,7 +14,7 @@ import {
|
|||
import path from 'path';
|
||||
|
||||
describe('getBlogPostAuthors', () => {
|
||||
test('can read no authors', () => {
|
||||
it('can read no authors', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {},
|
||||
|
@ -31,7 +31,7 @@ describe('getBlogPostAuthors', () => {
|
|||
).toEqual([]);
|
||||
});
|
||||
|
||||
test('can read author from legacy front matter', () => {
|
||||
it('can read author from legacy front matter', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
@ -79,7 +79,7 @@ describe('getBlogPostAuthors', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('can read authors string', () => {
|
||||
it('can read authors string', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
@ -90,7 +90,7 @@ describe('getBlogPostAuthors', () => {
|
|||
).toEqual([{key: 'slorber', name: 'Sébastien Lorber'}]);
|
||||
});
|
||||
|
||||
test('can read authors string[]', () => {
|
||||
it('can read authors string[]', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
@ -107,7 +107,7 @@ describe('getBlogPostAuthors', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('can read authors Author', () => {
|
||||
it('can read authors Author', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
@ -118,7 +118,7 @@ describe('getBlogPostAuthors', () => {
|
|||
).toEqual([{name: 'Sébastien Lorber', title: 'maintainer'}]);
|
||||
});
|
||||
|
||||
test('can read authors Author[]', () => {
|
||||
it('can read authors Author[]', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
@ -135,7 +135,7 @@ describe('getBlogPostAuthors', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('can read authors complex (string | Author)[] setup with keys and local overrides', () => {
|
||||
it('can read authors complex (string | Author)[] setup with keys and local overrides', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
@ -166,7 +166,7 @@ describe('getBlogPostAuthors', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('throw when using author key with no authorsMap', () => {
|
||||
it('throw when using author key with no authorsMap', () => {
|
||||
expect(() =>
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
@ -180,7 +180,7 @@ describe('getBlogPostAuthors', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('throw when using author key with empty authorsMap', () => {
|
||||
it('throw when using author key with empty authorsMap', () => {
|
||||
expect(() =>
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
@ -194,7 +194,7 @@ describe('getBlogPostAuthors', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('throw when using bad author key in string', () => {
|
||||
it('throw when using bad author key in string', () => {
|
||||
expect(() =>
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
@ -213,7 +213,7 @@ describe('getBlogPostAuthors', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('throw when using bad author key in string[]', () => {
|
||||
it('throw when using bad author key in string[]', () => {
|
||||
expect(() =>
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
@ -232,7 +232,7 @@ describe('getBlogPostAuthors', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('throw when using bad author key in Author[].key', () => {
|
||||
it('throw when using bad author key in Author[].key', () => {
|
||||
expect(() =>
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
@ -251,7 +251,7 @@ describe('getBlogPostAuthors', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('throw when mixing legacy/new authors front matter', () => {
|
||||
it('throw when mixing legacy/new authors front matter', () => {
|
||||
expect(() =>
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
@ -287,7 +287,7 @@ describe('getAuthorsMap', () => {
|
|||
contentPath: fixturesDir,
|
||||
};
|
||||
|
||||
test('getAuthorsMap can read yml file', async () => {
|
||||
it('getAuthorsMap can read yml file', async () => {
|
||||
await expect(
|
||||
getAuthorsMap({
|
||||
contentPaths,
|
||||
|
@ -296,7 +296,7 @@ describe('getAuthorsMap', () => {
|
|||
).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
test('getAuthorsMap can read json file', async () => {
|
||||
it('getAuthorsMap can read json file', async () => {
|
||||
await expect(
|
||||
getAuthorsMap({
|
||||
contentPaths,
|
||||
|
@ -305,7 +305,7 @@ describe('getAuthorsMap', () => {
|
|||
).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
test('getAuthorsMap can return undefined if yaml file not found', async () => {
|
||||
it('getAuthorsMap can return undefined if yaml file not found', async () => {
|
||||
await expect(
|
||||
getAuthorsMap({
|
||||
contentPaths,
|
||||
|
@ -316,7 +316,7 @@ describe('getAuthorsMap', () => {
|
|||
});
|
||||
|
||||
describe('validateAuthorsMap', () => {
|
||||
test('accept valid authors map', () => {
|
||||
it('accept valid authors map', () => {
|
||||
const authorsMap: AuthorsMap = {
|
||||
slorber: {
|
||||
name: 'Sébastien Lorber',
|
||||
|
@ -338,7 +338,7 @@ describe('validateAuthorsMap', () => {
|
|||
expect(validateAuthorsMap(authorsMap)).toEqual(authorsMap);
|
||||
});
|
||||
|
||||
test('rename snake case image_url to camelCase imageURL', () => {
|
||||
it('rename snake case image_url to camelCase imageURL', () => {
|
||||
const authorsMap: AuthorsMap = {
|
||||
slorber: {
|
||||
name: 'Sébastien Lorber',
|
||||
|
@ -353,7 +353,7 @@ describe('validateAuthorsMap', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('accept author with only image', () => {
|
||||
it('accept author with only image', () => {
|
||||
const authorsMap: AuthorsMap = {
|
||||
slorber: {
|
||||
imageURL: 'https://github.com/slorber.png',
|
||||
|
@ -363,7 +363,7 @@ describe('validateAuthorsMap', () => {
|
|||
expect(validateAuthorsMap(authorsMap)).toEqual(authorsMap);
|
||||
});
|
||||
|
||||
test('reject author without name or image', () => {
|
||||
it('reject author without name or image', () => {
|
||||
const authorsMap: AuthorsMap = {
|
||||
slorber: {
|
||||
title: 'foo',
|
||||
|
@ -376,7 +376,7 @@ describe('validateAuthorsMap', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('reject undefined author', () => {
|
||||
it('reject undefined author', () => {
|
||||
expect(() =>
|
||||
validateAuthorsMap({
|
||||
slorber: undefined,
|
||||
|
@ -384,7 +384,7 @@ describe('validateAuthorsMap', () => {
|
|||
).toThrowErrorMatchingInlineSnapshot(`"\\"slorber\\" is required"`);
|
||||
});
|
||||
|
||||
test('reject null author', () => {
|
||||
it('reject null author', () => {
|
||||
expect(() =>
|
||||
validateAuthorsMap({
|
||||
slorber: null,
|
||||
|
@ -394,7 +394,7 @@ describe('validateAuthorsMap', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('reject array author', () => {
|
||||
it('reject array author', () => {
|
||||
expect(() =>
|
||||
validateAuthorsMap({slorber: []}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
|
@ -402,14 +402,14 @@ describe('validateAuthorsMap', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('reject array content', () => {
|
||||
it('reject array content', () => {
|
||||
expect(() => validateAuthorsMap([])).toThrowErrorMatchingInlineSnapshot(
|
||||
// TODO improve this error message
|
||||
`"\\"value\\" must be of type object"`,
|
||||
);
|
||||
});
|
||||
|
||||
test('reject flat author', () => {
|
||||
it('reject flat author', () => {
|
||||
expect(() =>
|
||||
validateAuthorsMap({name: 'Sébastien'}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
|
@ -418,7 +418,7 @@ describe('validateAuthorsMap', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('reject non-map author', () => {
|
||||
it('reject non-map author', () => {
|
||||
const authorsMap: AuthorsMap = {
|
||||
// @ts-expect-error: for tests
|
||||
slorber: [],
|
||||
|
|
|
@@ -24,13 +24,13 @@ function testField(params: {
   ][];
 }) {
   describe(`"${params.fieldName}" field`, () => {
-    test('accept valid values', () => {
+    it('accept valid values', () => {
       params.validFrontMatters.forEach((frontMatter) => {
         expect(validateBlogPostFrontMatter(frontMatter)).toEqual(frontMatter);
       });
     });

-    test('convert valid values', () => {
+    it('convert valid values', () => {
       params.convertibleFrontMatter?.forEach(
         ([convertibleFrontMatter, convertedFrontMatter]) => {
           expect(validateBlogPostFrontMatter(convertibleFrontMatter)).toEqual(
@@ -40,7 +40,7 @@ function testField(params: {
       );
     });

-    test('throw error for values', () => {
+    it('throw error for values', () => {
       params.invalidFrontMatters?.forEach(([frontMatter, message]) => {
         try {
           validateBlogPostFrontMatter(frontMatter);
@@ -64,12 +64,12 @@ function testField(params: {
 }

 describe('validateBlogPostFrontMatter', () => {
-  test('accept empty object', () => {
+  it('accept empty object', () => {
     const frontMatter = {};
     expect(validateBlogPostFrontMatter(frontMatter)).toEqual(frontMatter);
   });

-  test('accept unknown field', () => {
+  it('accept unknown field', () => {
     const frontMatter = {abc: '1'};
     expect(validateBlogPostFrontMatter(frontMatter)).toEqual(frontMatter);
   });
@@ -106,7 +106,7 @@ describe('validateBlogPostFrontMatter id', () => {
 });

 describe('validateBlogPostFrontMatter handles legacy/new author front matter', () => {
-  test('allow legacy author front matter', () => {
+  it('allow legacy author front matter', () => {
     const frontMatter: BlogPostFrontMatter = {
       author: 'Sebastien',
       author_url: 'https://sebastienlorber.com',
@@ -116,7 +116,7 @@ describe('validateBlogPostFrontMatter handles legacy/new author front matter', (
     expect(validateBlogPostFrontMatter(frontMatter)).toEqual(frontMatter);
   });

-  test('allow new authors front matter', () => {
+  it('allow new authors front matter', () => {
     const frontMatter: BlogPostFrontMatter = {
       authors: [
         'slorber',
|
@ -5,10 +5,74 @@
|
|||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import {truncate, parseBlogFileName} from '../blogUtils';
|
||||
import {jest} from '@jest/globals';
|
||||
import {
|
||||
truncate,
|
||||
parseBlogFileName,
|
||||
linkify,
|
||||
getSourceToPermalink,
|
||||
type LinkifyParams,
|
||||
} from '../blogUtils';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'path';
|
||||
import type {
|
||||
BlogBrokenMarkdownLink,
|
||||
BlogContentPaths,
|
||||
BlogPost,
|
||||
} from '../types';
|
||||
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const contentPaths: BlogContentPaths = {
|
||||
contentPath: path.join(siteDir, 'blog-with-ref'),
|
||||
contentPathLocalized: path.join(siteDir, 'blog-with-ref-localized'),
|
||||
};
|
||||
const pluginDir = 'blog-with-ref';
|
||||
const blogPosts: BlogPost[] = [
|
||||
{
|
||||
id: 'Happy 1st Birthday Slash!',
|
||||
metadata: {
|
||||
permalink: '/blog/2018/12/14/Happy-First-Birthday-Slash',
|
||||
source: path.posix.join(
|
||||
'@site',
|
||||
pluginDir,
|
||||
'2018-12-14-Happy-First-Birthday-Slash.md',
|
||||
),
|
||||
title: 'Happy 1st Birthday Slash!',
|
||||
description: `pattern name`,
|
||||
date: new Date('2018-12-14'),
|
||||
tags: [],
|
||||
prevItem: {
|
||||
permalink: '/blog/2019/01/01/date-matter',
|
||||
title: 'date-matter',
|
||||
},
|
||||
truncated: false,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const transform = async (
|
||||
filePath: string,
|
||||
options?: Partial<LinkifyParams>,
|
||||
) => {
|
||||
const fileContent = await fs.readFile(filePath, 'utf-8');
|
||||
const transformedContent = linkify({
|
||||
filePath,
|
||||
fileString: fileContent,
|
||||
siteDir,
|
||||
contentPaths,
|
||||
sourceToPermalink: getSourceToPermalink(blogPosts),
|
||||
onBrokenMarkdownLink: (brokenMarkdownLink) => {
|
||||
throw new Error(
|
||||
`Broken markdown link found: ${JSON.stringify(brokenMarkdownLink)}`,
|
||||
);
|
||||
},
|
||||
...options,
|
||||
});
|
||||
return [fileContent, transformedContent];
|
||||
};
|
||||
|
||||
describe('truncate', () => {
|
||||
test('truncates texts', () => {
|
||||
it('truncates texts', () => {
|
||||
expect(
|
||||
truncate('aaa\n<!-- truncate -->\nbbb\nccc', /<!-- truncate -->/),
|
||||
).toEqual('aaa\n');
|
||||
|
@ -16,7 +80,8 @@ describe('truncate', () => {
|
|||
truncate('\n<!-- truncate -->\nbbb\nccc', /<!-- truncate -->/),
|
||||
).toEqual('\n');
|
||||
});
|
||||
test('leaves texts without markers', () => {
|
||||
|
||||
it('leaves texts without markers', () => {
|
||||
expect(truncate('aaa\nbbb\nccc', /<!-- truncate -->/)).toEqual(
|
||||
'aaa\nbbb\nccc',
|
||||
);
|
||||
|
@ -25,7 +90,7 @@ describe('truncate', () => {
|
|||
});
|
||||
|
||||
describe('parseBlogFileName', () => {
|
||||
test('parse file', () => {
|
||||
it('parses file', () => {
|
||||
expect(parseBlogFileName('some-post.md')).toEqual({
|
||||
date: undefined,
|
||||
text: 'some-post',
|
||||
|
@ -33,7 +98,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse folder', () => {
|
||||
it('parses folder', () => {
|
||||
expect(parseBlogFileName('some-post/index.md')).toEqual({
|
||||
date: undefined,
|
||||
text: 'some-post',
|
||||
|
@ -41,7 +106,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse nested file', () => {
|
||||
it('parses nested file', () => {
|
||||
expect(parseBlogFileName('some-post/some-file.md')).toEqual({
|
||||
date: undefined,
|
||||
text: 'some-post/some-file',
|
||||
|
@ -49,7 +114,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse nested folder', () => {
|
||||
it('parses nested folder', () => {
|
||||
expect(parseBlogFileName('some-post/some-subfolder/index.md')).toEqual({
|
||||
date: undefined,
|
||||
text: 'some-post/some-subfolder',
|
||||
|
@ -57,7 +122,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse file respecting date convention', () => {
|
||||
it('parses file respecting date convention', () => {
|
||||
expect(
|
||||
parseBlogFileName('2021-05-12-announcing-docusaurus-two-beta.md'),
|
||||
).toEqual({
|
||||
|
@ -67,7 +132,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse folder name respecting date convention', () => {
|
||||
it('parses folder name respecting date convention', () => {
|
||||
expect(
|
||||
parseBlogFileName('2021-05-12-announcing-docusaurus-two-beta/index.md'),
|
||||
).toEqual({
|
||||
|
@ -77,7 +142,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse folder tree respecting date convention', () => {
|
||||
it('parses folder tree respecting date convention', () => {
|
||||
expect(
|
||||
parseBlogFileName('2021/05/12/announcing-docusaurus-two-beta/index.md'),
|
||||
).toEqual({
|
||||
|
@ -87,7 +152,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse folder name/tree (mixed) respecting date convention', () => {
|
||||
it('parses folder name/tree (mixed) respecting date convention', () => {
|
||||
expect(
|
||||
parseBlogFileName('2021/05-12-announcing-docusaurus-two-beta/index.md'),
|
||||
).toEqual({
|
||||
|
@ -97,7 +162,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse nested folder tree respecting date convention', () => {
|
||||
it('parses nested folder tree respecting date convention', () => {
|
||||
expect(
|
||||
parseBlogFileName(
|
||||
'2021/05/12/announcing-docusaurus-two-beta/subfolder/subfile.md',
|
||||
|
@ -109,7 +174,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse date in the middle of path', () => {
|
||||
it('parses date in the middle of path', () => {
|
||||
expect(
|
||||
parseBlogFileName('team-a/2021/05/12/announcing-docusaurus-two-beta.md'),
|
||||
).toEqual({
|
||||
|
@ -119,7 +184,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse date in the middle of a folder name', () => {
|
||||
it('parses date in the middle of a folder name', () => {
|
||||
expect(
|
||||
parseBlogFileName(
|
||||
'team-a-2021-05-12-hey/announcing-docusaurus-two-beta.md',
|
||||
|
@ -131,3 +196,40 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('linkify', () => {
|
||||
it('transforms to correct link', async () => {
|
||||
const post = path.join(contentPaths.contentPath, 'post.md');
|
||||
const [content, transformedContent] = await transform(post);
|
||||
expect(transformedContent).toMatchSnapshot();
|
||||
expect(transformedContent).toContain(
|
||||
'](/blog/2018/12/14/Happy-First-Birthday-Slash',
|
||||
);
|
||||
expect(transformedContent).not.toContain(
|
||||
'](2018-12-14-Happy-First-Birthday-Slash.md)',
|
||||
);
|
||||
expect(content).not.toEqual(transformedContent);
|
||||
});
|
||||
|
||||
it('reports broken markdown links', async () => {
|
||||
const filePath = 'post-with-broken-links.md';
|
||||
const folderPath = contentPaths.contentPath;
|
||||
const postWithBrokenLinks = path.join(folderPath, filePath);
|
||||
const onBrokenMarkdownLink = jest.fn();
|
||||
const [, transformedContent] = await transform(postWithBrokenLinks, {
|
||||
onBrokenMarkdownLink,
|
||||
});
|
||||
expect(transformedContent).toMatchSnapshot();
|
||||
expect(onBrokenMarkdownLink).toHaveBeenCalledTimes(2);
|
||||
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(1, {
|
||||
filePath: path.resolve(folderPath, filePath),
|
||||
contentPaths,
|
||||
link: 'postNotExist1.md',
|
||||
} as BlogBrokenMarkdownLink);
|
||||
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(2, {
|
||||
filePath: path.resolve(folderPath, filePath),
|
||||
contentPaths,
|
||||
link: './postNotExist2.mdx',
|
||||
} as BlogBrokenMarkdownLink);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -52,85 +52,81 @@ async function testGenerateFeeds(
|
|||
});
|
||||
}
|
||||
|
||||
describe('blogFeed', () => {
|
||||
(['atom', 'rss', 'json'] as const).forEach((feedType) => {
|
||||
describe(`${feedType}`, () => {
|
||||
const fsMock = jest.spyOn(fs, 'outputFile').mockImplementation(() => {});
|
||||
describe.each(['atom', 'rss', 'json'])('%s', (feedType) => {
|
||||
const fsMock = jest.spyOn(fs, 'outputFile').mockImplementation(() => {});
|
||||
|
||||
test('should not show feed without posts', async () => {
|
||||
const siteDir = __dirname;
|
||||
const siteConfig = {
|
||||
title: 'Hello',
|
||||
baseUrl: '/',
|
||||
url: 'https://docusaurus.io',
|
||||
favicon: 'image/favicon.ico',
|
||||
};
|
||||
const outDir = path.join(siteDir, 'build-snap');
|
||||
it('does not get generated without posts', async () => {
|
||||
const siteDir = __dirname;
|
||||
const siteConfig = {
|
||||
title: 'Hello',
|
||||
baseUrl: '/',
|
||||
url: 'https://docusaurus.io',
|
||||
favicon: 'image/favicon.ico',
|
||||
};
|
||||
const outDir = path.join(siteDir, 'build-snap');
|
||||
|
||||
await testGenerateFeeds(
|
||||
{
|
||||
siteDir,
|
||||
siteConfig,
|
||||
i18n: DefaultI18N,
|
||||
outDir,
|
||||
} as LoadContext,
|
||||
{
|
||||
path: 'invalid-blog-path',
|
||||
routeBasePath: 'blog',
|
||||
tagsBasePath: 'tags',
|
||||
authorsMapPath: 'authors.yml',
|
||||
include: ['*.md', '*.mdx'],
|
||||
feedOptions: {
|
||||
type: [feedType],
|
||||
copyright: 'Copyright',
|
||||
},
|
||||
readingTime: ({content, defaultReadingTime}) =>
|
||||
defaultReadingTime({content}),
|
||||
} as PluginOptions,
|
||||
);
|
||||
await testGenerateFeeds(
|
||||
{
|
||||
siteDir,
|
||||
siteConfig,
|
||||
i18n: DefaultI18N,
|
||||
outDir,
|
||||
} as LoadContext,
|
||||
{
|
||||
path: 'invalid-blog-path',
|
||||
routeBasePath: 'blog',
|
||||
tagsBasePath: 'tags',
|
||||
authorsMapPath: 'authors.yml',
|
||||
include: ['*.md', '*.mdx'],
|
||||
feedOptions: {
|
||||
type: [feedType],
|
||||
copyright: 'Copyright',
|
||||
},
|
||||
readingTime: ({content, defaultReadingTime}) =>
|
||||
defaultReadingTime({content}),
|
||||
} as PluginOptions,
|
||||
);
|
||||
|
||||
expect(fsMock).toBeCalledTimes(0);
|
||||
fsMock.mockClear();
|
||||
});
|
||||
expect(fsMock).toBeCalledTimes(0);
|
||||
fsMock.mockClear();
|
||||
});
|
||||
|
||||
test('shows feed item for each post', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const outDir = path.join(siteDir, 'build-snap');
|
||||
const siteConfig = {
|
||||
title: 'Hello',
|
||||
baseUrl: '/myBaseUrl/',
|
||||
url: 'https://docusaurus.io',
|
||||
favicon: 'image/favicon.ico',
|
||||
};
|
||||
it('has feed item for each post', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const outDir = path.join(siteDir, 'build-snap');
|
||||
const siteConfig = {
|
||||
title: 'Hello',
|
||||
baseUrl: '/myBaseUrl/',
|
||||
url: 'https://docusaurus.io',
|
||||
favicon: 'image/favicon.ico',
|
||||
};
|
||||
|
||||
// Build is quite difficult to mock, so we built the blog beforehand and
|
||||
// copied the output to the fixture...
|
||||
await testGenerateFeeds(
|
||||
{
|
||||
siteDir,
|
||||
siteConfig,
|
||||
i18n: DefaultI18N,
|
||||
outDir,
|
||||
} as LoadContext,
|
||||
{
|
||||
path: 'blog',
|
||||
routeBasePath: 'blog',
|
||||
tagsBasePath: 'tags',
|
||||
authorsMapPath: 'authors.yml',
|
||||
include: DEFAULT_OPTIONS.include,
|
||||
exclude: DEFAULT_OPTIONS.exclude,
|
||||
feedOptions: {
|
||||
type: [feedType],
|
||||
copyright: 'Copyright',
|
||||
},
|
||||
readingTime: ({content, defaultReadingTime}) =>
|
||||
defaultReadingTime({content}),
|
||||
} as PluginOptions,
|
||||
);
|
||||
// Build is quite difficult to mock, so we built the blog beforehand and
|
||||
// copied the output to the fixture...
|
||||
await testGenerateFeeds(
|
||||
{
|
||||
siteDir,
|
||||
siteConfig,
|
||||
i18n: DefaultI18N,
|
||||
outDir,
|
||||
} as LoadContext,
|
||||
{
|
||||
path: 'blog',
|
||||
routeBasePath: 'blog',
|
||||
tagsBasePath: 'tags',
|
||||
authorsMapPath: 'authors.yml',
|
||||
include: DEFAULT_OPTIONS.include,
|
||||
exclude: DEFAULT_OPTIONS.exclude,
|
||||
feedOptions: {
|
||||
type: [feedType],
|
||||
copyright: 'Copyright',
|
||||
},
|
||||
readingTime: ({content, defaultReadingTime}) =>
|
||||
defaultReadingTime({content}),
|
||||
} as PluginOptions,
|
||||
);
|
||||
|
||||
expect(fsMock.mock.calls.map((call) => call[1])).toMatchSnapshot();
|
||||
fsMock.mockClear();
|
||||
});
|
||||
});
|
||||
expect(fsMock.mock.calls.map((call) => call[1])).toMatchSnapshot();
|
||||
fsMock.mockClear();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -59,58 +59,58 @@ function validateAndNormalize(
|
|||
}
|
||||
}
|
||||
|
||||
describe('loadBlog', () => {
|
||||
const PluginPath = 'blog';
|
||||
const PluginPath = 'blog';
|
||||
|
||||
const BaseEditUrl = 'https://baseEditUrl.com/edit';
|
||||
const BaseEditUrl = 'https://baseEditUrl.com/edit';
|
||||
|
||||
const getPlugin = async (
|
||||
siteDir: string,
|
||||
pluginOptions: Partial<PluginOptions> = {},
|
||||
i18n: I18n = DefaultI18N,
|
||||
) => {
|
||||
const generatedFilesDir: string = path.resolve(siteDir, '.docusaurus');
|
||||
const siteConfig = {
|
||||
title: 'Hello',
|
||||
baseUrl: '/',
|
||||
url: 'https://docusaurus.io',
|
||||
} as DocusaurusConfig;
|
||||
return pluginContentBlog(
|
||||
{
|
||||
siteDir,
|
||||
siteConfig,
|
||||
generatedFilesDir,
|
||||
i18n,
|
||||
} as LoadContext,
|
||||
validateAndNormalize(PluginOptionSchema, {
|
||||
path: PluginPath,
|
||||
editUrl: BaseEditUrl,
|
||||
...pluginOptions,
|
||||
}),
|
||||
);
|
||||
};
|
||||
const getPlugin = async (
|
||||
siteDir: string,
|
||||
pluginOptions: Partial<PluginOptions> = {},
|
||||
i18n: I18n = DefaultI18N,
|
||||
) => {
|
||||
const generatedFilesDir: string = path.resolve(siteDir, '.docusaurus');
|
||||
const siteConfig = {
|
||||
title: 'Hello',
|
||||
baseUrl: '/',
|
||||
url: 'https://docusaurus.io',
|
||||
} as DocusaurusConfig;
|
||||
return pluginContentBlog(
|
||||
{
|
||||
siteDir,
|
||||
siteConfig,
|
||||
generatedFilesDir,
|
||||
i18n,
|
||||
} as LoadContext,
|
||||
validateAndNormalize(PluginOptionSchema, {
|
||||
path: PluginPath,
|
||||
editUrl: BaseEditUrl,
|
||||
...pluginOptions,
|
||||
}),
|
||||
);
|
||||
};
|
||||
|
||||
const getBlogPosts = async (
|
||||
siteDir: string,
|
||||
pluginOptions: Partial<PluginOptions> = {},
|
||||
i18n: I18n = DefaultI18N,
|
||||
) => {
|
||||
const plugin = await getPlugin(siteDir, pluginOptions, i18n);
|
||||
const {blogPosts} = (await plugin.loadContent!())!;
|
||||
return blogPosts;
|
||||
};
|
||||
const getBlogPosts = async (
|
||||
siteDir: string,
|
||||
pluginOptions: Partial<PluginOptions> = {},
|
||||
i18n: I18n = DefaultI18N,
|
||||
) => {
|
||||
const plugin = await getPlugin(siteDir, pluginOptions, i18n);
|
||||
const {blogPosts} = (await plugin.loadContent!())!;
|
||||
return blogPosts;
|
||||
};
|
||||
|
||||
const getBlogTags = async (
|
||||
siteDir: string,
|
||||
pluginOptions: Partial<PluginOptions> = {},
|
||||
i18n: I18n = DefaultI18N,
|
||||
) => {
|
||||
const plugin = await getPlugin(siteDir, pluginOptions, i18n);
|
||||
const {blogTags} = (await plugin.loadContent!())!;
|
||||
return blogTags;
|
||||
};
|
||||
const getBlogTags = async (
|
||||
siteDir: string,
|
||||
pluginOptions: Partial<PluginOptions> = {},
|
||||
i18n: I18n = DefaultI18N,
|
||||
) => {
|
||||
const plugin = await getPlugin(siteDir, pluginOptions, i18n);
|
||||
const {blogTags} = (await plugin.loadContent!())!;
|
||||
return blogTags;
|
||||
};
|
||||
|
||||
test('getPathsToWatch', async () => {
|
||||
describe('blog plugin', () => {
|
||||
it('getPathsToWatch returns right files', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const plugin = await getPlugin(siteDir);
|
||||
const pathsToWatch = plugin.getPathsToWatch!();
|
||||
|
@ -124,7 +124,7 @@ describe('loadBlog', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('simple website', async () => {
|
||||
it('builds a simple website', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const blogPosts = await getBlogPosts(siteDir);
|
||||
|
||||
|
@ -303,7 +303,7 @@ describe('loadBlog', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('simple website blog dates localized', async () => {
|
||||
it('builds simple website blog with localized dates', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const blogPostsFrench = await getBlogPosts(siteDir, {}, getI18n('fr'));
|
||||
expect(blogPostsFrench).toHaveLength(8);
|
||||
|
@ -333,7 +333,7 @@ describe('loadBlog', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('edit url with editLocalizedBlogs true', async () => {
|
||||
it('handles edit URL with editLocalizedBlogs: true', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const blogPosts = await getBlogPosts(siteDir, {editLocalizedFiles: true});
|
||||
|
||||
|
@ -346,7 +346,7 @@ describe('loadBlog', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('edit url with editUrl function', async () => {
|
||||
it('handles edit URL with editUrl function', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
|
||||
const hardcodedEditUrl = 'hardcoded-edit-url';
|
||||
|
@ -410,7 +410,7 @@ describe('loadBlog', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('draft blog post not exists in production build', async () => {
|
||||
it('excludes draft blog post from production build', async () => {
|
||||
process.env.NODE_ENV = 'production';
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const blogPosts = await getBlogPosts(siteDir);
|
||||
|
@ -418,7 +418,7 @@ describe('loadBlog', () => {
|
|||
expect(blogPosts.find((v) => v.metadata.title === 'draft')).toBeUndefined();
|
||||
});
|
||||
|
||||
test('create blog post without date', async () => {
|
||||
it('creates blog post without date', async () => {
|
||||
const siteDir = path.join(
|
||||
__dirname,
|
||||
'__fixtures__',
|
||||
|
@ -457,7 +457,7 @@ describe('loadBlog', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('test ascending sort direction of blog post', async () => {
|
||||
it('can sort blog posts in ascending order', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const normalOrder = await getBlogPosts(siteDir);
|
||||
const reversedOrder = await getBlogPosts(siteDir, {
|
||||
|
@ -468,7 +468,7 @@ describe('loadBlog', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('test blog tags', async () => {
|
||||
it('works with blog tags', async () => {
|
||||
const siteDir = path.join(
|
||||
__dirname,
|
||||
'__fixtures__',
|
||||
|
@ -482,7 +482,7 @@ describe('loadBlog', () => {
|
|||
expect(blogTags).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('test blog tags: no pagination', async () => {
|
||||
it('works on blog tags without pagination', async () => {
|
||||
const siteDir = path.join(
|
||||
__dirname,
|
||||
'__fixtures__',
|
||||
|
|
|
@ -1,101 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import {jest} from '@jest/globals';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'path';
|
||||
import {linkify, type LinkifyParams, getSourceToPermalink} from '../blogUtils';
|
||||
import type {
|
||||
BlogBrokenMarkdownLink,
|
||||
BlogContentPaths,
|
||||
BlogPost,
|
||||
} from '../types';
|
||||
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const contentPaths: BlogContentPaths = {
|
||||
contentPath: path.join(siteDir, 'blog-with-ref'),
|
||||
contentPathLocalized: path.join(siteDir, 'blog-with-ref-localized'),
|
||||
};
|
||||
const pluginDir = 'blog-with-ref';
|
||||
const blogPosts: BlogPost[] = [
|
||||
{
|
||||
id: 'Happy 1st Birthday Slash!',
|
||||
metadata: {
|
||||
permalink: '/blog/2018/12/14/Happy-First-Birthday-Slash',
|
||||
source: path.posix.join(
|
||||
'@site',
|
||||
pluginDir,
|
||||
'2018-12-14-Happy-First-Birthday-Slash.md',
|
||||
),
|
||||
title: 'Happy 1st Birthday Slash!',
|
||||
description: `pattern name`,
|
||||
date: new Date('2018-12-14'),
|
||||
tags: [],
|
||||
prevItem: {
|
||||
permalink: '/blog/2019/01/01/date-matter',
|
||||
title: 'date-matter',
|
||||
},
|
||||
truncated: false,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const transform = async (
|
||||
filePath: string,
|
||||
options?: Partial<LinkifyParams>,
|
||||
) => {
|
||||
const fileContent = await fs.readFile(filePath, 'utf-8');
|
||||
const transformedContent = linkify({
|
||||
filePath,
|
||||
fileString: fileContent,
|
||||
siteDir,
|
||||
contentPaths,
|
||||
sourceToPermalink: getSourceToPermalink(blogPosts),
|
||||
onBrokenMarkdownLink: (brokenMarkdownLink) => {
|
||||
throw new Error(
|
||||
`Broken markdown link found: ${JSON.stringify(brokenMarkdownLink)}`,
|
||||
);
|
||||
},
|
||||
...options,
|
||||
});
|
||||
return [fileContent, transformedContent];
|
||||
};
|
||||
|
||||
test('transform to correct link', async () => {
|
||||
const post = path.join(contentPaths.contentPath, 'post.md');
|
||||
const [content, transformedContent] = await transform(post);
|
||||
expect(transformedContent).toMatchSnapshot();
|
||||
expect(transformedContent).toContain(
|
||||
'](/blog/2018/12/14/Happy-First-Birthday-Slash',
|
||||
);
|
||||
expect(transformedContent).not.toContain(
|
||||
'](2018-12-14-Happy-First-Birthday-Slash.md)',
|
||||
);
|
||||
expect(content).not.toEqual(transformedContent);
|
||||
});
|
||||
|
||||
test('report broken markdown links', async () => {
|
||||
const filePath = 'post-with-broken-links.md';
|
||||
const folderPath = contentPaths.contentPath;
|
||||
const postWithBrokenLinks = path.join(folderPath, filePath);
|
||||
const onBrokenMarkdownLink = jest.fn();
|
||||
const [, transformedContent] = await transform(postWithBrokenLinks, {
|
||||
onBrokenMarkdownLink,
|
||||
});
|
||||
expect(transformedContent).toMatchSnapshot();
|
||||
expect(onBrokenMarkdownLink).toHaveBeenCalledTimes(2);
|
||||
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(1, {
|
||||
filePath: path.resolve(folderPath, filePath),
|
||||
contentPaths,
|
||||
link: 'postNotExist1.md',
|
||||
} as BlogBrokenMarkdownLink);
|
||||
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(2, {
|
||||
filePath: path.resolve(folderPath, filePath),
|
||||
contentPaths,
|
||||
link: './postNotExist2.mdx',
|
||||
} as BlogBrokenMarkdownLink);
|
||||
});
|
|
@ -11,121 +11,123 @@ import {PluginOptionSchema, DEFAULT_OPTIONS} from '../pluginOptionSchema';
|
|||
const markdownPluginsFunctionStub = () => {};
|
||||
const markdownPluginsObjectStub = {};
|
||||
|
||||
test('should normalize options', () => {
|
||||
const {value, error} = PluginOptionSchema.validate({});
|
||||
expect(value).toEqual(DEFAULT_OPTIONS);
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should accept correctly defined user options', () => {
|
||||
const userOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: 'rss', title: 'myTitle'},
|
||||
path: 'not_blog',
|
||||
routeBasePath: 'myBlog',
|
||||
postsPerPage: 5,
|
||||
include: ['api/*', 'docs/*'],
|
||||
};
|
||||
const {value, error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual({
|
||||
...userOptions,
|
||||
feedOptions: {type: ['rss'], title: 'myTitle', copyright: ''},
|
||||
});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should accept valid user options', async () => {
|
||||
const userOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
routeBasePath: 'myBlog',
|
||||
beforeDefaultRemarkPlugins: [],
|
||||
beforeDefaultRehypePlugins: [markdownPluginsFunctionStub],
|
||||
remarkPlugins: [[markdownPluginsFunctionStub, {option1: '42'}]],
|
||||
rehypePlugins: [
|
||||
markdownPluginsObjectStub,
|
||||
[markdownPluginsFunctionStub, {option1: '42'}],
|
||||
],
|
||||
};
|
||||
const {value, error} = await PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual(userOptions);
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should throw Error in case of invalid options', () => {
|
||||
const {error} = PluginOptionSchema.validate({
|
||||
path: 'not_blog',
|
||||
postsPerPage: -1,
|
||||
include: ['api/*', 'docs/*'],
|
||||
routeBasePath: 'not_blog',
|
||||
describe('blog plugin options schema', () => {
|
||||
it('normalizes options', () => {
|
||||
const {value, error} = PluginOptionSchema.validate({});
|
||||
expect(value).toEqual(DEFAULT_OPTIONS);
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
expect(error).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should throw Error in case of invalid feedtype', () => {
|
||||
const {error} = PluginOptionSchema.validate({
|
||||
feedOptions: {
|
||||
type: 'none',
|
||||
},
|
||||
it('accepts correctly defined user options', () => {
|
||||
const userOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: 'rss', title: 'myTitle'},
|
||||
path: 'not_blog',
|
||||
routeBasePath: 'myBlog',
|
||||
postsPerPage: 5,
|
||||
include: ['api/*', 'docs/*'],
|
||||
};
|
||||
const {value, error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual({
|
||||
...userOptions,
|
||||
feedOptions: {type: ['rss'], title: 'myTitle', copyright: ''},
|
||||
});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
expect(error).toMatchSnapshot();
|
||||
});
|
||||
it('accepts valid user options', async () => {
|
||||
const userOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
routeBasePath: 'myBlog',
|
||||
beforeDefaultRemarkPlugins: [],
|
||||
beforeDefaultRehypePlugins: [markdownPluginsFunctionStub],
|
||||
remarkPlugins: [[markdownPluginsFunctionStub, {option1: '42'}]],
|
||||
rehypePlugins: [
|
||||
markdownPluginsObjectStub,
|
||||
[markdownPluginsFunctionStub, {option1: '42'}],
|
||||
],
|
||||
};
|
||||
const {value, error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual(userOptions);
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should convert all feed type to array with other feed type', () => {
|
||||
const {value} = PluginOptionSchema.validate({
|
||||
feedOptions: {type: 'all'},
|
||||
});
|
||||
expect(value).toEqual({
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: ['rss', 'atom', 'json'], copyright: ''},
|
||||
});
|
||||
});
|
||||
it('throws Error in case of invalid options', () => {
|
||||
const {error} = PluginOptionSchema.validate({
|
||||
path: 'not_blog',
|
||||
postsPerPage: -1,
|
||||
include: ['api/*', 'docs/*'],
|
||||
routeBasePath: 'not_blog',
|
||||
});
|
||||
|
||||
test('should accept null type and return same', () => {
|
||||
const {value, error} = PluginOptionSchema.validate({
|
||||
feedOptions: {type: null},
|
||||
expect(error).toMatchSnapshot();
|
||||
});
|
||||
expect(value).toEqual({
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: null},
|
||||
});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should contain array with rss + atom for missing feed type', () => {
|
||||
const {value} = PluginOptionSchema.validate({
|
||||
feedOptions: {},
|
||||
});
|
||||
expect(value).toEqual(DEFAULT_OPTIONS);
|
||||
});
|
||||
it('throws Error in case of invalid feedtype', () => {
|
||||
const {error} = PluginOptionSchema.validate({
|
||||
feedOptions: {
|
||||
type: 'none',
|
||||
},
|
||||
});
|
||||
|
||||
test('should have array with rss + atom, title for missing feed type', () => {
|
||||
const {value} = PluginOptionSchema.validate({
|
||||
feedOptions: {title: 'title'},
|
||||
expect(error).toMatchSnapshot();
|
||||
});
|
||||
expect(value).toEqual({
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: ['rss', 'atom'], title: 'title', copyright: ''},
|
||||
|
||||
it('converts all feed type to array with other feed type', () => {
|
||||
const {value} = PluginOptionSchema.validate({
|
||||
feedOptions: {type: 'all'},
|
||||
});
|
||||
expect(value).toEqual({
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: ['rss', 'atom', 'json'], copyright: ''},
|
||||
});
|
||||
});
|
||||
|
||||
it('accepts null type and return same', () => {
|
||||
const {value, error} = PluginOptionSchema.validate({
|
||||
feedOptions: {type: null},
|
||||
});
|
||||
expect(value).toEqual({
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: null},
|
||||
});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
it('contains array with rss + atom for missing feed type', () => {
|
||||
const {value} = PluginOptionSchema.validate({
|
||||
feedOptions: {},
|
||||
});
|
||||
expect(value).toEqual(DEFAULT_OPTIONS);
|
||||
});
|
||||
|
||||
it('has array with rss + atom, title for missing feed type', () => {
|
||||
const {value} = PluginOptionSchema.validate({
|
||||
feedOptions: {title: 'title'},
|
||||
});
|
||||
expect(value).toEqual({
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: ['rss', 'atom'], title: 'title', copyright: ''},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('blog sidebar', () => {
|
||||
test('should accept 0 sidebar count', () => {
|
||||
it('accepts 0 sidebar count', () => {
|
||||
const userOptions = {blogSidebarCount: 0};
|
||||
const {value, error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual({...DEFAULT_OPTIONS, ...userOptions});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should accept "ALL" sidebar count', () => {
|
||||
it('accepts "ALL" sidebar count', () => {
|
||||
const userOptions = {blogSidebarCount: 'ALL'};
|
||||
const {value, error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual({...DEFAULT_OPTIONS, ...userOptions});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should reject "abcdef" sidebar count', () => {
|
||||
it('rejects "abcdef" sidebar count', () => {
|
||||
const userOptions = {blogSidebarCount: 'abcdef'};
|
||||
const {error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(error).toMatchInlineSnapshot(
|
||||
|
@ -133,14 +135,14 @@ describe('blog sidebar', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should accept "all posts" sidebar title', () => {
|
||||
it('accepts "all posts" sidebar title', () => {
|
||||
const userOptions = {blogSidebarTitle: 'all posts'};
|
||||
const {value, error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual({...DEFAULT_OPTIONS, ...userOptions});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should reject 42 sidebar title', () => {
|
||||
it('rejects 42 sidebar title', () => {
|
||||
const userOptions = {blogSidebarTitle: 42};
|
||||
const {error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(error).toMatchInlineSnapshot(
|
||||
|
|
|
@ -71,26 +71,26 @@ function getSampleTranslationFilesTranslated() {
|
|||
}
|
||||
|
||||
describe('getContentTranslationFiles', () => {
|
||||
test('should return translation files matching snapshot', async () => {
|
||||
it('returns translation files matching snapshot', async () => {
|
||||
expect(getSampleTranslationFiles()).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
||||
describe('translateContent', () => {
|
||||
test('should fallback when translation is incomplete', () => {
|
||||
it('falls back when translation is incomplete', () => {
|
||||
expect(
|
||||
translateContent(sampleBlogContent, [{path: 'foo', content: {}}]),
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should not translate anything if translation files are untranslated', () => {
|
||||
it('does not translate anything if translation files are untranslated', () => {
|
||||
const translationFiles = getSampleTranslationFiles();
|
||||
expect(translateContent(sampleBlogContent, translationFiles)).toEqual(
|
||||
sampleBlogContent,
|
||||
);
|
||||
});
|
||||
|
||||
test('should return translated loaded content matching snapshot', () => {
|
||||
it('returns translated loaded', () => {
|
||||
const translationFiles = getSampleTranslationFilesTranslated();
|
||||
expect(
|
||||
translateContent(sampleBlogContent, translationFiles),
|
||||
|
|