Mirror of https://github.com/facebook/docusaurus.git, synced 2025-05-10 15:47:23 +02:00

chore: enable eslint-plugin-jest (#6375)

parent 3e5944ef1f
commit 52db7320a6
32 changed files with 194 additions and 193 deletions
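The heart of the change is the ESLint configuration in the first file below: eslint-plugin-jest is added as a dependency, registered as a plugin, `plugin:jest/recommended` is extended, and a few rules are tuned on top. A minimal sketch of that kind of setup, showing only the jest-related parts (rule names and the OFF/WARNING constants mirror the diff; everything else in the real config is omitted):

// .eslintrc.js — illustrative sketch only, not the full Docusaurus config.
const OFF = 0;
const WARNING = 1;

module.exports = {
  extends: ['plugin:jest/recommended'],
  plugins: ['jest'],
  reportUnusedDisableDirectives: true,
  rules: {
    // Prefer `await expect(p).resolves...` over `expect(await p)...`.
    'jest/prefer-expect-resolves': WARNING,
    // Both turned off in this commit.
    'jest/expect-expect': OFF,
    'jest/valid-title': OFF,
  },
};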
@@ -29,6 +29,7 @@ module.exports = {
     'plugin:@typescript-eslint/eslint-recommended',
     'plugin:@typescript-eslint/recommended',
     'plugin:react-hooks/recommended',
+    'plugin:jest/recommended',
     'airbnb',
     'prettier',
   ],
@@ -40,7 +41,7 @@ module.exports = {
     },
   },
   reportUnusedDisableDirectives: true,
-  plugins: ['react-hooks', 'header'],
+  plugins: ['react-hooks', 'header', 'jest'],
   rules: {
     'react-hooks/rules-of-hooks': ERROR,
     'react-hooks/exhaustive-deps': ERROR,
@@ -171,6 +172,9 @@ module.exports = {
         ],
       },
     ],
+    'jest/prefer-expect-resolves': WARNING,
+    'jest/expect-expect': OFF,
+    'jest/valid-title': OFF,
   },
   overrides: [
     {
@@ -38,15 +38,15 @@ describe('packages', () => {
   test('should contain repository and directory for every package', async () => {
     const packageJsonFiles = await getPackagesJsonFiles();

-    packageJsonFiles.forEach((packageJsonFile) => {
-      if (packageJsonFile.content.private !== true) {
+    packageJsonFiles
+      .filter((packageJsonFile) => !packageJsonFile.content.private)
+      .forEach((packageJsonFile) => {
         expect(packageJsonFile.content.repository).toEqual({
           type: 'git',
           url: 'https://github.com/facebook/docusaurus.git',
           directory: packageJsonFile.file.replace(/\/package\.json$/, ''),
         });
-      }
-    });
+      });
   });

 /*
@@ -58,17 +58,19 @@ describe('packages', () => {
   test('should have publishConfig.access: "public" when name starts with @', async () => {
     const packageJsonFiles = await getPackagesJsonFiles();

-    packageJsonFiles.forEach((packageJsonFile) => {
-      if (packageJsonFile.content.name.startsWith('@')) {
-        // Unfortunately jest custom message do not exist in loops, so using an exception instead to show failing package file
-        // (see https://github.com/facebook/jest/issues/3293)
-        // expect(packageJsonFile.content.publishConfig?.access).toEqual('public');
-        if (packageJsonFile.content.publishConfig?.access !== 'public') {
-          throw new Error(
-            `Package ${packageJsonFile.file} does not have publishConfig.access: 'public'`,
-          );
+    packageJsonFiles
+      .filter((packageJsonFile) => packageJsonFile.content.name.startsWith('@'))
+      .forEach((packageJsonFile) => {
+        if (packageJsonFile) {
+          // Unfortunately jest custom message do not exist in loops, so using an exception instead to show failing package file
+          // (see https://github.com/facebook/jest/issues/3293)
+          // expect(packageJsonFile.content.publishConfig?.access).toEqual('public');
+          if (packageJsonFile.content.publishConfig?.access !== 'public') {
+            throw new Error(
+              `Package ${packageJsonFile.file} does not have publishConfig.access: 'public'`,
+            );
+          }
         }
-      }
-    });
+      });
   });
 });
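The two rewrites above drop an `if` inside `forEach` in favour of a `.filter()` step, so every remaining iteration asserts (or throws) unconditionally — the shape that rules such as `jest/no-conditional-expect` steer toward. A minimal sketch of the pattern with made-up data (names are illustrative, not from the repo):

// Illustrative only: filter first, then check every remaining item.
type Pkg = {name: string; private?: boolean; repository?: {type: string}};

const packages: Pkg[] = [
  {name: '@scope/a', repository: {type: 'git'}},
  {name: 'internal-tool', private: true}, // skipped by the filter
];

packages
  .filter((pkg) => !pkg.private)
  .forEach((pkg) => {
    if (!pkg.repository) {
      throw new Error(`Package ${pkg.name} has no repository field`);
    }
  });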
@@ -90,6 +90,7 @@
     "eslint-config-prettier": "^8.3.0",
     "eslint-plugin-header": "^3.1.1",
     "eslint-plugin-import": "^2.25.3",
+    "eslint-plugin-jest": "^25.7.0",
     "eslint-plugin-jsx-a11y": "^6.5.1",
     "eslint-plugin-react": "^7.27.0",
     "eslint-plugin-react-hooks": "^4.3.0",
@@ -32,7 +32,7 @@ test('should process all heading levels', async () => {

 `;

-  expect(await getHeadings(md)).toEqual([
+  await expect(getHeadings(md)).resolves.toEqual([
     {
       children: [
         {
@@ -126,7 +126,7 @@ some text

 `;

-  expect(await getHeadings(md)).toEqual([
+  await expect(getHeadings(md)).resolves.toEqual([
     {
       children: [
         {
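Most of the test hunks that follow (including the one just above) apply the same `jest/prefer-expect-resolves` rewrite: `expect(await promise)` becomes `await expect(promise).resolves`, which keeps the assertion on the promise itself and reports rejections more clearly. A small sketch of the two spellings (`getHeadings` here is a stand-in, not the real implementation):

// Stand-in for the async function under test.
async function getHeadings(md: string): Promise<string[]> {
  return md.split('\n').filter((line) => line.startsWith('#'));
}

test('heading extraction', async () => {
  const md = '# Title\nsome text\n## Section';

  // Old style: resolve first, then assert on the value.
  expect(await getHeadings(md)).toEqual(['# Title', '## Section']);

  // Style preferred by jest/prefer-expect-resolves.
  await expect(getHeadings(md)).resolves.toEqual(['# Title', '## Section']);
});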
@@ -10,7 +10,7 @@ import path from 'path';
 import fs from 'fs-extra';

 describe('migration test', () => {
-  test('simple website', () => {
+  test('simple website', async () => {
     const siteDir = path.join(
       __dirname,
       '__fixtures__',
@@ -18,10 +18,12 @@ describe('migration test', () => {
       'website',
     );
     const newDir = path.join(__dirname, '__fixtures__', 'migrated_simple_site');
-    migrateDocusaurusProject(siteDir, newDir);
+    await expect(
+      migrateDocusaurusProject(siteDir, newDir),
+    ).resolves.toBeUndefined();
     fs.removeSync(newDir);
   });
-  test('complex website', () => {
+  test('complex website', async () => {
     const siteDir = path.join(
       __dirname,
       '__fixtures__',
@@ -33,11 +35,13 @@ describe('migration test', () => {
       '__fixtures__',
       'migrated_complex_site',
     );
-    migrateDocusaurusProject(siteDir, newDir);
+    await expect(
+      migrateDocusaurusProject(siteDir, newDir),
+    ).resolves.toBeUndefined();
     fs.removeSync(newDir);
   });

-  test('missing versions', () => {
+  test('missing versions', async () => {
     const siteDir = path.join(
       __dirname,
       '__fixtures__',
@@ -49,7 +53,9 @@ describe('migration test', () => {
       '__fixtures__',
       'migrated_missing_version_site',
     );
-    migrateDocusaurusProject(siteDir, newDir);
+    await expect(
+      migrateDocusaurusProject(siteDir, newDir),
+    ).resolves.toBeUndefined();
     fs.removeSync(newDir);
   });
 });
@@ -39,7 +39,7 @@ describe('collectRedirects', () => {
     );
   });

-  test('should collect redirects to html/exe extension', () => {
+  test('should collect redirects from html/exe extension', () => {
     expect(
       collectRedirects(
         createTestPluginContext(
@@ -9,22 +9,24 @@ import {validateRedirect} from '../redirectValidation';

 describe('validateRedirect', () => {
   test('validate good redirects without throwing', () => {
-    validateRedirect({
-      from: '/fromSomePath',
-      to: '/toSomePath',
-    });
-    validateRedirect({
-      from: '/from/Some/Path',
-      to: '/toSomePath',
-    });
-    validateRedirect({
-      from: '/fromSomePath',
-      to: '/toSomePath',
-    });
-    validateRedirect({
-      from: '/fromSomePath',
-      to: '/to/Some/Path',
-    });
+    expect(() => {
+      validateRedirect({
+        from: '/fromSomePath',
+        to: '/toSomePath',
+      });
+      validateRedirect({
+        from: '/from/Some/Path',
+        to: '/toSomePath',
+      });
+      validateRedirect({
+        from: '/fromSomePath',
+        to: '/toSomePath',
+      });
+      validateRedirect({
+        from: '/fromSomePath',
+        to: '/to/Some/Path',
+      });
+    }).not.toThrow();
   });

   test('throw for bad redirects', () => {
@@ -288,30 +288,30 @@ describe('getAuthorsMap', () => {
   };

   test('getAuthorsMap can read yml file', async () => {
-    expect(
-      await getAuthorsMap({
+    await expect(
+      getAuthorsMap({
         contentPaths,
         authorsMapPath: 'authors.yml',
       }),
-    ).toBeDefined();
+    ).resolves.toBeDefined();
   });

   test('getAuthorsMap can read json file', async () => {
-    expect(
-      await getAuthorsMap({
+    await expect(
+      getAuthorsMap({
         contentPaths,
         authorsMapPath: 'authors.json',
       }),
-    ).toBeDefined();
+    ).resolves.toBeDefined();
   });

   test('getAuthorsMap can return undefined if yaml file not found', async () => {
-    expect(
-      await getAuthorsMap({
+    await expect(
+      getAuthorsMap({
         contentPaths,
         authorsMapPath: 'authors_does_not_exist.yml',
       }),
-    ).toBeUndefined();
+    ).resolves.toBeUndefined();
   });
 });

@@ -44,6 +44,7 @@ function testField(params: {
     params.invalidFrontMatters?.forEach(([frontMatter, message]) => {
       try {
         validateBlogPostFrontMatter(frontMatter);
+        // eslint-disable-next-line jest/no-jasmine-globals
         fail(
           new Error(
             `Blog frontmatter is expected to be rejected, but was accepted successfully:\n ${JSON.stringify(
@@ -54,6 +55,7 @@ function testField(params: {
           ),
         );
       } catch (e) {
+        // eslint-disable-next-line jest/no-conditional-expect
        expect(e.message).toMatch(new RegExp(escapeStringRegexp(message)));
       }
     });
@@ -41,6 +41,7 @@ function testField(params: {
     params.invalidFrontMatters?.forEach(([frontMatter, message]) => {
       try {
         validateDocFrontMatter(frontMatter);
+        // eslint-disable-next-line jest/no-jasmine-globals
         fail(
           new Error(
             `Doc frontmatter is expected to be rejected, but was accepted successfully:\n ${JSON.stringify(
@@ -51,6 +52,7 @@ function testField(params: {
           ),
         );
       } catch (e) {
+        // eslint-disable-next-line jest/no-conditional-expect
        expect(e.message).toMatch(new RegExp(escapeStringRegexp(message)));
       }
     });
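The two frontmatter test files above keep their try/catch structure and instead disable the offending rules inline: `fail()` is a Jasmine global (`jest/no-jasmine-globals`), and an `expect` inside a `catch` is a conditional assertion (`jest/no-conditional-expect`). The rewrite those rules usually push toward looks roughly like this (hypothetical validator and message, for illustration only):

// Hypothetical validator that throws on bad input.
function validateFrontMatter(frontMatter: {title?: unknown}): void {
  if (typeof frontMatter.title !== 'string') {
    throw new Error('"title" must be a string');
  }
}

test('rejects invalid front matter', () => {
  // No try/catch and no conditional expect: assert the throw directly.
  expect(() => validateFrontMatter({title: 42})).toThrow(/must be a string/);
});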
@@ -52,20 +52,20 @@ describe('lastUpdate', () => {
       '__fixtures__',
       nonExistingFileName,
     );
-    expect(await getFileLastUpdate(nonExistingFilePath)).toBeNull();
+    await expect(getFileLastUpdate(nonExistingFilePath)).resolves.toBeNull();
     expect(consoleMock).toHaveBeenCalledTimes(1);
     expect(consoleMock).toHaveBeenLastCalledWith(
       expect.stringMatching(/with exit code 128/),
     );
-    expect(await getFileLastUpdate(null)).toBeNull();
-    expect(await getFileLastUpdate(undefined)).toBeNull();
+    await expect(getFileLastUpdate(null)).resolves.toBeNull();
+    await expect(getFileLastUpdate(undefined)).resolves.toBeNull();
     consoleMock.mockRestore();
   });

   test('temporary created file that has no git timestamp', async () => {
     const tempFilePath = path.join(__dirname, '__fixtures__', '.temp');
     fs.writeFileSync(tempFilePath, 'Lorem ipsum :)');
-    expect(await getFileLastUpdate(tempFilePath)).toBeNull();
+    await expect(getFileLastUpdate(tempFilePath)).resolves.toBeNull();
     fs.unlinkSync(tempFilePath);
   });

@@ -124,7 +124,7 @@ describe('createSidebarsUtils', () => {
     getFirstLink,
   } = createSidebarsUtils(sidebars);

-  test('getSidebarNameByDocId', async () => {
+  test('getFirstDocIdOfFirstSidebar', async () => {
     expect(getFirstDocIdOfFirstSidebar()).toEqual('doc1');
   });

@@ -9,7 +9,7 @@ import path from 'path';
 import {loadContext} from '@docusaurus/core/lib/server';

 import pluginContentPages from '../index';
-import normalizePluginOptions from './pluginOptionSchema.test';
+import {PluginOptionSchema} from '../pluginOptionSchema';

 describe('docusaurus-plugin-content-pages', () => {
   test('simple pages', async () => {
@@ -18,9 +18,9 @@ describe('docusaurus-plugin-content-pages', () => {
     const pluginPath = 'src/pages';
     const plugin = await pluginContentPages(
       context,
-      normalizePluginOptions({
+      PluginOptionSchema.validate({
         path: pluginPath,
-      }),
+      }).value,
     );
     const pagesMetadata = await plugin.loadContent?.();

@@ -85,9 +85,9 @@ describe('docusaurus-plugin-content-pages', () => {
           currentLocale: 'fr',
         },
       },
-      normalizePluginOptions({
+      PluginOptionSchema.validate({
         path: pluginPath,
-      }),
+      }).value,
     );
     const pagesMetadata = await plugin.loadContent?.();

@@ -8,7 +8,7 @@
 import {PluginOptionSchema, DEFAULT_OPTIONS} from '../pluginOptionSchema';
 import type {PluginOptions} from '@docusaurus/plugin-content-pages';

-export default function normalizePluginOptions(
+function normalizePluginOptions(
   options: Partial<PluginOptions>,
 ): PluginOptions {
   const {value, error} = PluginOptionSchema.validate(options, {
@@ -243,7 +243,7 @@ describe('themeConfig', () => {
     ).toThrowErrorMatchingInlineSnapshot(`"Nested dropdowns are not allowed"`);
   });

-  test('should reject nested dropdowns', () => {
+  test('should reject nested dropdowns 2', () => {
     const config = {
       navbar: {
         items: [
@@ -442,7 +442,7 @@ describe('themeConfig', () => {
     );
   });

-  test('should allow width and height specification for logo ', () => {
+  test('should allow width and height specification for logo', () => {
     const altTagConfig = {
       navbar: {
         logo: {
@@ -46,7 +46,7 @@ function AnchorHeading({as: As, id, ...props}: Props) {
   );
 }

-export default function Heading({as, ...props}: Props) {
+export default function Heading({as, ...props}: Props): JSX.Element {
   if (as === 'h1') {
     return (
       <h1
@@ -8,7 +8,7 @@
 import {useContextualSearchFilters} from '@docusaurus/theme-common';

 // Translate search-engine agnostic search filters to Algolia search filters
-export function useAlgoliaContextualFacetFilters() {
+export function useAlgoliaContextualFacetFilters(): [string, string[]] {
   const {locale, tags} = useContextualSearchFilters();

   // seems safe to convert locale->language, see AlgoliaSearchMetadata comment
@@ -62,7 +62,7 @@ describe('applyTrailingSlash', () => {
     ).toEqual('/baseUrl/?query#anchor');
   });

-  test('should not apply to #anchor links ', () => {
+  test('should not apply to #anchor links', () => {
     expect(applyTrailingSlash('#', params(true))).toEqual('#');
     expect(applyTrailingSlash('#', params(false))).toEqual('#');
     expect(applyTrailingSlash('#', params(undefined))).toEqual('#');
@@ -23,102 +23,104 @@ describe('getDataFilePath', () => {
   const contentPathNestedYml = path.join(fixturesDir, 'contentPathNestedYml');

   test('getDataFilePath returns localized Yml path in priority', async () => {
-    expect(
-      await getDataFilePath({
+    await expect(
+      getDataFilePath({
         filePath: 'authors.yml',
         contentPaths: {
           contentPathLocalized: contentPathYml1,
           contentPath: contentPathYml2,
         },
       }),
-    ).toEqual(path.join(contentPathYml1, 'authors.yml'));
-    expect(
-      await getDataFilePath({
+    ).resolves.toEqual(path.join(contentPathYml1, 'authors.yml'));
+    await expect(
+      getDataFilePath({
         filePath: 'authors.yml',
         contentPaths: {
           contentPathLocalized: contentPathYml2,
           contentPath: contentPathYml1,
         },
       }),
-    ).toEqual(path.join(contentPathYml2, 'authors.yml'));
+    ).resolves.toEqual(path.join(contentPathYml2, 'authors.yml'));
   });

   test('getDataFilePath returns localized Json path in priority', async () => {
-    expect(
-      await getDataFilePath({
+    await expect(
+      getDataFilePath({
         filePath: 'authors.json',
         contentPaths: {
           contentPathLocalized: contentPathJson1,
           contentPath: contentPathJson2,
         },
       }),
-    ).toEqual(path.join(contentPathJson1, 'authors.json'));
-    expect(
-      await getDataFilePath({
+    ).resolves.toEqual(path.join(contentPathJson1, 'authors.json'));
+    await expect(
+      getDataFilePath({
         filePath: 'authors.json',
         contentPaths: {
           contentPathLocalized: contentPathJson2,
           contentPath: contentPathJson1,
         },
       }),
-    ).toEqual(path.join(contentPathJson2, 'authors.json'));
+    ).resolves.toEqual(path.join(contentPathJson2, 'authors.json'));
   });

   test('getDataFilePath returns unlocalized Yml path as fallback', async () => {
-    expect(
-      await getDataFilePath({
+    await expect(
+      getDataFilePath({
         filePath: 'authors.yml',
         contentPaths: {
           contentPathLocalized: contentPathEmpty,
           contentPath: contentPathYml2,
         },
       }),
-    ).toEqual(path.join(contentPathYml2, 'authors.yml'));
+    ).resolves.toEqual(path.join(contentPathYml2, 'authors.yml'));
   });

   test('getDataFilePath returns unlocalized Json path as fallback', async () => {
-    expect(
-      await getDataFilePath({
+    await expect(
+      getDataFilePath({
         filePath: 'authors.json',
         contentPaths: {
           contentPathLocalized: contentPathEmpty,
           contentPath: contentPathJson1,
         },
       }),
-    ).toEqual(path.join(contentPathJson1, 'authors.json'));
+    ).resolves.toEqual(path.join(contentPathJson1, 'authors.json'));
   });

   test('getDataFilePath can return undefined (file not found)', async () => {
-    expect(
-      await getDataFilePath({
+    await expect(
+      getDataFilePath({
         filePath: 'authors.json',
         contentPaths: {
           contentPathLocalized: contentPathEmpty,
           contentPath: contentPathYml1,
         },
       }),
-    ).toBeUndefined();
-    expect(
-      await getDataFilePath({
+    ).resolves.toBeUndefined();
+    await expect(
+      getDataFilePath({
         filePath: 'authors.yml',
         contentPaths: {
           contentPathLocalized: contentPathEmpty,
           contentPath: contentPathJson1,
         },
       }),
-    ).toBeUndefined();
+    ).resolves.toBeUndefined();
   });

   test('getDataFilePath can return nested path', async () => {
-    expect(
-      await getDataFilePath({
+    await expect(
+      getDataFilePath({
         filePath: 'sub/folder/authors.yml',
         contentPaths: {
           contentPathLocalized: contentPathEmpty,
           contentPath: contentPathNestedYml,
         },
       }),
-    ).toEqual(path.join(contentPathNestedYml, 'sub/folder/authors.yml'));
+    ).resolves.toEqual(
+      path.join(contentPathNestedYml, 'sub/folder/authors.yml'),
+    );
   });
 });

@@ -321,11 +321,11 @@ describe('mapAsyncSequential', () => {
     const timeTotal = timeAfter - timeBefore;

     const totalTimeouts = sum(Object.values(itemToTimeout));
-    expect(timeTotal > totalTimeouts);
+    expect(timeTotal).toBeGreaterThanOrEqual(totalTimeouts);

-    expect(itemMapStartsAt['1'] > 0);
-    expect(itemMapStartsAt['2'] > itemMapEndsAt['1']);
-    expect(itemMapStartsAt['3'] > itemMapEndsAt['2']);
+    expect(itemMapStartsAt['1']).toBeGreaterThanOrEqual(0);
+    expect(itemMapStartsAt['2']).toBeGreaterThanOrEqual(itemMapEndsAt['1']);
+    expect(itemMapStartsAt['3']).toBeGreaterThanOrEqual(itemMapEndsAt['2']);
   });
 });

@@ -353,8 +353,8 @@ describe('findAsyncSequential', () => {
     expect(findFn).toHaveBeenNthCalledWith(2, '2');

     const timeTotal = timeAfter - timeBefore;
-    expect(timeTotal > 100);
-    expect(timeTotal < 150);
+    expect(timeTotal).toBeGreaterThanOrEqual(100);
+    expect(timeTotal).toBeLessThan(150);
   });
 });

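The jsUtils fixes above address a silent-pass pattern: `expect(a > b);` computes a boolean but never calls a matcher, so the test can never fail on it — the kind of mistake rules such as `jest/valid-expect` and `jest/expect-expect` exist to catch. A tiny sketch with made-up numbers:

test('timing comparison', () => {
  const timeTotal = 120;

  // Asserts nothing: no matcher is ever invoked on the expectation.
  expect(timeTotal > 999);

  // Actual assertions, with useful failure messages.
  expect(timeTotal).toBeGreaterThanOrEqual(100);
  expect(timeTotal).toBeLessThan(150);
});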
@@ -802,7 +802,7 @@ describe('parseMarkdownString', () => {
     `);
   });

-  test('should delete only first heading', () => {
+  test('should delete only first heading 2', () => {
     expect(
       parseMarkdownString(dedent`
         # test
@@ -46,7 +46,7 @@ describe('normalizeFrontMatterTag', () => {
     expect(normalizeFrontMatterTag(tagsPath, input)).toEqual(expectedOutput);
   });

-  test('should normalize complex string tag', () => {
+  test('should normalize complex string tag with object tag', () => {
     const tagsPath = '/all/tags';
     const input: Input = {
       label: 'tag complex Label',
@@ -22,7 +22,7 @@ describe('Interpolate', () => {
     );
   });

-  test('placeholders with string values', () => {
+  test('placeholders with string values 2', () => {
     const text = '{number} {string} {object} {array}';
     const values = {
       number: 42,
@@ -43,8 +43,4 @@ describe('isInternalUrl', () => {
   test('should be false for undefined links', () => {
     expect(isInternalUrl(undefined)).toBeFalsy();
   });
-
-  test('should be true for root relative links', () => {
-    expect(isInternalUrl('//reactjs.org')).toBeFalsy();
-  });
 });
@@ -25,7 +25,7 @@ describe('remoteBranchUrl', () => {
     );
     expect(url).toEqual('https://user:pass@github.com/facebook/docusaurus.git');
   });
-  test('should build a normal http url', () => {
+  test('should build a normal http url with port', () => {
     const url = buildHttpsUrl(
       'user:pass',
       'github.com',
@@ -38,12 +38,6 @@ describe('transformMarkdownHeadingLine', () => {
     );
   });

-  test('works for simple level-2 heading', () => {
-    expect(transformMarkdownHeadingLine('## ABC', createSlugger())).toEqual(
-      '## ABC {#abc}',
-    );
-  });
-
   test('unwraps markdown links', () => {
     const input = `## hello [facebook](https://facebook.com) [crowdin](https://crowdin.com/translate/docusaurus-v2/126/en-fr?filter=basic&value=0)`;
     expect(transformMarkdownHeadingLine(input, createSlugger())).toEqual(
@@ -75,7 +75,7 @@ describe('loadRoutes', () => {
       component: 'hello/world.js',
     } as RouteConfig;

-    expect(loadRoutes([routeConfigWithoutPath], '/')).rejects
+    await expect(loadRoutes([routeConfigWithoutPath], '/')).rejects
       .toMatchInlineSnapshot(`
       [Error: Invalid route config: path must be a string and component is required.
       {"component":"hello/world.js"}]
@@ -85,7 +85,7 @@ describe('loadRoutes', () => {
       path: '/hello/world',
     } as RouteConfig;

-    expect(loadRoutes([routeConfigWithoutComponent], '/')).rejects
+    await expect(loadRoutes([routeConfigWithoutComponent], '/')).rejects
       .toMatchInlineSnapshot(`
       [Error: Invalid route config: path must be a string and component is required.
       {"path":"/hello/world"}]
@@ -5,16 +5,16 @@ exports[`ensureUniquePluginInstanceIds reject multi instance plugins with same i
 To use the same plugin multiple times on a Docusaurus site, you need to assign a unique ID to each plugin instance."
 `;

+exports[`ensureUniquePluginInstanceIds reject multi instance plugins with some without id 1`] = `
+"Plugin \\"plugin-docs\\" is used 2 times with ID \\"default\\".
+To use the same plugin multiple times on a Docusaurus site, you need to assign a unique ID to each plugin instance.
+
+The plugin ID is \\"default\\" by default. It's possible that the preset you are using already includes a plugin instance, in which case you either want to disable the plugin in the preset (to use a single instance), or assign another ID to your extra plugin instance (to use multiple instances)."
+`;
+
 exports[`ensureUniquePluginInstanceIds reject multi instance plugins without id 1`] = `
 "Plugin \\"plugin-docs\\" is used 2 times with ID \\"default\\".
 To use the same plugin multiple times on a Docusaurus site, you need to assign a unique ID to each plugin instance.

 The plugin ID is \\"default\\" by default. It's possible that the preset you are using already includes a plugin instance, in which case you either want to disable the plugin in the preset (to use a single instance), or assign another ID to your extra plugin instance (to use multiple instances)."
 `;
-
-exports[`ensureUniquePluginInstanceIds reject multi instance plugins without id 2`] = `
-"Plugin \\"plugin-docs\\" is used 2 times with ID \\"default\\".
-To use the same plugin multiple times on a Docusaurus site, you need to assign a unique ID to each plugin instance.
-
-The plugin ID is \\"default\\" by default. It's possible that the preset you are using already includes a plugin instance, in which case you either want to disable the plugin in the preset (to use a single instance), or assign another ID to your extra plugin instance (to use multiple instances)."
-`;
@@ -59,7 +59,7 @@ describe('ensureUniquePluginInstanceIds', () => {
     ).toThrowErrorMatchingSnapshot();
   });

-  test('reject multi instance plugins without id', async () => {
+  test('reject multi instance plugins with some without id', async () => {
    expect(() =>
      ensureUniquePluginInstanceIds([
        createTestPlugin('plugin-docs'),
@@ -112,7 +112,7 @@ describe('writeTranslationFileContent', () => {
       },
     });

-    expect(await readFile()).toEqual({
+    await expect(readFile()).resolves.toEqual({
       key1: {message: 'key1 message'},
       key2: {message: 'key2 message'},
       key3: {message: 'key3 message'},
@@ -134,7 +134,7 @@ describe('writeTranslationFileContent', () => {
       },
     });

-    expect(await readFile()).toEqual({
+    await expect(readFile()).resolves.toEqual({
       key1: {message: 'PREFIX key1 message'},
       key2: {message: 'PREFIX key2 message'},
       key3: {message: 'PREFIX key3 message'},
@@ -158,7 +158,7 @@ describe('writeTranslationFileContent', () => {
       },
     });

-    expect(await readFile()).toEqual({
+    await expect(readFile()).resolves.toEqual({
       key1: {message: 'key1 message'},
       key2: {message: 'key2 message'},
       key3: {message: 'key3 message'},
@@ -182,7 +182,7 @@ describe('writeTranslationFileContent', () => {
       },
     });

-    expect(await readFile()).toEqual({
+    await expect(readFile()).resolves.toEqual({
       key1: {message: 'key1 message'},
       key2: {message: 'PREFIX key2 message new'},
     });
@@ -204,7 +204,7 @@ describe('writeTranslationFileContent', () => {
       },
     });

-    expect(await readFile()).toEqual({
+    await expect(readFile()).resolves.toEqual({
       key1: {message: 'key1 message new'},
       key2: {message: 'key2 message new'},
     });
@@ -227,7 +227,7 @@ describe('writeTranslationFileContent', () => {
       },
     });

-    expect(await readFile()).toEqual({
+    await expect(readFile()).resolves.toEqual({
       key1: {message: 'PREFIX key1 message new'},
       key2: {message: 'PREFIX key2 message new'},
     });
@@ -249,14 +249,14 @@ describe('writeTranslationFileContent', () => {
       },
     });

-    expect(await readFile()).toEqual({
+    await expect(readFile()).resolves.toEqual({
       key1: {message: 'key1 message', description: undefined},
       key2: {message: 'key2 message', description: 'key2 desc new'},
       key3: {message: 'key3 message', description: 'key3 desc new'},
     });
   });

-  test('should always override message description', async () => {
+  test('should throw for invalid content', async () => {
     const {filePath} = await createTmpTranslationFile(
       // @ts-expect-error: bad content on purpose
       {bad: 'content'},
@@ -305,7 +305,7 @@ describe('writePluginTranslations', () => {
       },
     });

-    expect(await readTranslationFileContent(filePath)).toEqual({
+    await expect(readTranslationFileContent(filePath)).resolves.toEqual({
       key1: {message: 'key1 message'},
       key2: {message: 'key2 message'},
       key3: {message: 'key3 message'},
@@ -345,14 +345,16 @@ describe('writePluginTranslations', () => {
       });
     }

-    expect(await readTranslationFileContent(filePath)).toEqual(undefined);
+    await expect(readTranslationFileContent(filePath)).resolves.toEqual(
+      undefined,
+    );

     await doWritePluginTranslations({
       key1: {message: 'key1 message', description: 'key1 desc'},
       key2: {message: 'key2 message', description: 'key2 desc'},
       key3: {message: 'key3 message', description: 'key3 desc'},
     });
-    expect(await readTranslationFileContent(filePath)).toEqual({
+    await expect(readTranslationFileContent(filePath)).resolves.toEqual({
       key1: {message: 'key1 message', description: 'key1 desc'},
       key2: {message: 'key2 message', description: 'key2 desc'},
       key3: {message: 'key3 message', description: 'key3 desc'},
@@ -365,7 +367,7 @@ describe('writePluginTranslations', () => {
       },
       {messagePrefix: 'PREFIX '},
     );
-    expect(await readTranslationFileContent(filePath)).toEqual({
+    await expect(readTranslationFileContent(filePath)).resolves.toEqual({
       key1: {message: 'key1 message', description: 'key1 desc'},
       key2: {message: 'key2 message', description: 'key2 desc'},
       key3: {message: 'key3 message', description: undefined},
@@ -381,7 +383,7 @@ describe('writePluginTranslations', () => {
       },
       {messagePrefix: 'PREFIX ', override: true},
     );
-    expect(await readTranslationFileContent(filePath)).toEqual({
+    await expect(readTranslationFileContent(filePath)).resolves.toEqual({
       key1: {message: 'PREFIX key1 message 3', description: 'key1 desc'},
       key2: {message: 'PREFIX key2 message 3', description: 'key2 desc'},
       key3: {message: 'PREFIX key3 message 3', description: 'key3 desc'},
@@ -417,7 +419,7 @@ describe('localizePluginTranslationFile', () => {
     expect(localizedTranslationFile).toEqual(translationFile);
   });

-  test('not localize if localized file does not exist', async () => {
+  test('not localize if localized file does not exist 2', async () => {
     const siteDir = await createTmpSiteDir();

     await writeTranslationFileContent({
@@ -4,6 +4,7 @@
  * This source code is licensed under the MIT license found in the
  * LICENSE file in the root directory of this source tree.
  */
+/* eslint-disable jest/no-conditional-expect */

 const stylelint = require('stylelint');
 const path = require('path');
@@ -24,28 +25,21 @@ function testStylelintRule(config, tests) {
   if (tests.accept && tests.accept.length) {
     describe('accept cases', () => {
       tests.accept.forEach((testCase) => {
-        const spec = testCase.only ? it.only : it;
-
-        spec(checkTestCaseContent(testCase), () => {
+        test(checkTestCaseContent(testCase), async () => {
           const options = {
             code: testCase.code,
             config,
             syntax: tests.syntax,
           };

-          return stylelint.lint(options).then((output) => {
-            expect(output.results[0].warnings).toEqual([]);
-
-            if (!tests.fix) {
-              return null;
-            }
-
-            // Check the fix.
-            return stylelint
-              .lint({...options, fix: true})
-              .then((fixedOutput) => getOutputCss(fixedOutput))
-              .then((fixedCode) => expect(fixedCode).toBe(testCase.fixed));
-          });
+          const output = await stylelint.lint(options);
+          expect(output.results[0].warnings).toEqual([]);
+          if (!tests.fix) {
+            return;
+          }
+          const fixedOutput = await stylelint.lint({...options, fix: true});
+          const fixedCode = getOutputCss(fixedOutput);
+          expect(fixedCode).toBe(testCase.fixed);
         });
       });
     });
@@ -54,51 +48,38 @@ function testStylelintRule(config, tests) {
   if (tests.reject && tests.reject.length) {
     describe('reject cases', () => {
       tests.reject.forEach((testCase) => {
-        const skip = testCase.skip ? it.skip : it;
-        const spec = testCase.only ? it.only : skip;
-
-        spec(checkTestCaseContent(testCase), () => {
+        test(checkTestCaseContent(testCase), async () => {
           const options = {
             code: testCase.code,
             config,
             syntax: tests.syntax,
           };

-          return stylelint.lint(options).then((output) => {
-            const {warnings} = output.results[0];
-            const warning = warnings[0];
-
-            expect(warnings.length).toBeGreaterThanOrEqual(1);
-            expect(testCase).toHaveMessage();
-
-            if (testCase.message != null) {
-              expect(warning.text).toBe(testCase.message);
-            }
-
-            if (testCase.line != null) {
-              expect(warning.line).toBe(testCase.line);
-            }
-
-            if (testCase.column != null) {
-              expect(warning.column).toBe(testCase.column);
-            }
-
-            if (!tests.fix) {
-              return null;
-            }
-
-            if (!testCase.fixed) {
-              throw new Error(
-                'If using { fix: true } in test tests, all reject cases must have { fixed: .. }',
-              );
-            }
-
-            // Check the fix.
-            return stylelint
-              .lint({...options, fix: true})
-              .then((fixedOutput) => getOutputCss(fixedOutput))
-              .then((fixedCode) => expect(fixedCode).toBe(testCase.fixed));
-          });
+          const output = await stylelint.lint(options);
+          const {warnings} = output.results[0];
+          const warning = warnings[0];
+          expect(warnings.length).toBeGreaterThanOrEqual(1);
+          expect(testCase).toHaveMessage();
+          if (testCase.message != null) {
+            expect(warning.text).toBe(testCase.message);
+          }
+          if (testCase.line != null) {
+            expect(warning.line).toBe(testCase.line);
+          }
+          if (testCase.column != null) {
+            expect(warning.column).toBe(testCase.column);
+          }
+          if (!tests.fix) {
+            return;
+          }
+          if (!testCase.fixed) {
+            throw new Error(
+              'If using { fix: true } in test tests, all reject cases must have { fixed: .. }',
+            );
+          }
+          const fixedOutput = await stylelint.lint({...options, fix: true});
+          const fixedCode = getOutputCss(fixedOutput);
+          expect(fixedCode).toBe(testCase.fixed);
         });
       });
     });
@@ -4165,7 +4165,7 @@
     semver "^7.3.5"
     tsutils "^3.21.0"

-"@typescript-eslint/experimental-utils@5.9.1":
+"@typescript-eslint/experimental-utils@5.9.1", "@typescript-eslint/experimental-utils@^5.0.0":
   version "5.9.1"
   resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-5.9.1.tgz#8c407c4dd5ffe522329df6e4c9c2b52206d5f7f1"
   integrity sha512-cb1Njyss0mLL9kLXgS/eEY53SZQ9sT519wpX3i+U457l2UXRDuo87hgKfgRazmu9/tQb0x2sr3Y0yrU+Zz0y+w==
@@ -8086,6 +8086,13 @@ eslint-plugin-import@^2.25.3:
     resolve "^1.20.0"
     tsconfig-paths "^3.12.0"

+eslint-plugin-jest@^25.7.0:
+  version "25.7.0"
+  resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz#ff4ac97520b53a96187bad9c9814e7d00de09a6a"
+  integrity sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ==
+  dependencies:
+    "@typescript-eslint/experimental-utils" "^5.0.0"
+
 eslint-plugin-jsx-a11y@^6.5.1:
   version "6.5.1"
   resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.5.1.tgz#cdbf2df901040ca140b6ec14715c988889c2a6d8"