Mirror of https://github.com/facebook/docusaurus.git
fix test
parent 2da34337a6
commit ef937e402c
2 changed files with 48 additions and 29 deletions
File 1 of 2: the handleBrokenLinks test suite.

@@ -9,7 +9,7 @@ import {jest} from '@jest/globals';
 import path from 'path';
 import _ from 'lodash';
 import {handleBrokenLinks} from '../brokenLinks';
-import type {RouteConfig} from '@docusaurus/types';
+import type {DocusaurusConfig, Props, RouteConfig} from '@docusaurus/types';
 
 describe('handleBrokenLinks', () => {
   const routes: RouteConfig[] = [
@@ -136,10 +136,14 @@ describe('handleBrokenLinks', () => {
     };
     await handleBrokenLinks({
       allCollectedLinks: allCollectedCorrectLinks,
-      onBrokenLinks: 'error',
-      routes,
-      baseUrl: '/',
-      outDir,
+      props: {
+        routes,
+        baseUrl: '/',
+        outDir,
+        siteConfig: {
+          onBrokenLinks: 'error',
+        } as DocusaurusConfig,
+      } as Props,
     });
     expect(consoleMock).toBeCalledTimes(0);
   });
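The remaining test hunks repeat the same mechanical change: `handleBrokenLinks` now takes a `props` object, and the tests widen partial fixtures with `as` instead of spelling out every `Props` and `DocusaurusConfig` field. A sketch of that pattern, using a hypothetical `makeTestProps` helper (the helper name and its defaults are invented for illustration):

    import type {DocusaurusConfig, Props} from '@docusaurus/types';

    // Hypothetical test helper: the partial object is widened with `as` so a
    // test does not have to construct every field of DocusaurusConfig/Props.
    function makeTestProps(
      onBrokenLinks: DocusaurusConfig['onBrokenLinks'],
      overrides: Partial<Props> = {},
    ): Props {
      return {
        baseUrl: '/',
        siteConfig: {onBrokenLinks} as DocusaurusConfig,
        ...overrides,
      } as Props;
    }

    // Usage mirroring the updated tests:
    // await handleBrokenLinks({allCollectedLinks, props: makeTestProps('error', {routes, outDir})});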
@@ -148,10 +152,14 @@ describe('handleBrokenLinks', () => {
     await expect(() =>
       handleBrokenLinks({
         allCollectedLinks,
-        onBrokenLinks: 'throw',
-        routes,
-        baseUrl: '/',
-        outDir,
+        props: {
+          routes,
+          baseUrl: '/',
+          outDir,
+          siteConfig: {
+            onBrokenLinks: 'throw',
+          } as DocusaurusConfig,
+        } as Props,
       }),
     ).rejects.toThrowErrorMatchingSnapshot();
   });
@@ -162,10 +170,14 @@ describe('handleBrokenLinks', () => {
     const lodashMock = jest.spyOn(_, 'mapValues');
     await handleBrokenLinks({
       allCollectedLinks,
-      onBrokenLinks: 'ignore',
-      routes,
-      baseUrl: '/',
-      outDir,
+      props: {
+        routes,
+        baseUrl: '/',
+        outDir,
+        siteConfig: {
+          onBrokenLinks: 'ignore',
+        } as DocusaurusConfig,
+      } as Props,
     });
     expect(lodashMock).toBeCalledTimes(0);
     lodashMock.mockRestore();
@@ -185,10 +197,14 @@ describe('handleBrokenLinks', () => {
     await expect(() =>
       handleBrokenLinks({
         allCollectedLinks,
-        onBrokenLinks: 'throw',
-        routes,
-        baseUrl: '/',
-        outDir,
+        props: {
+          routes,
+          baseUrl: '/',
+          outDir,
+          siteConfig: {
+            onBrokenLinks: 'throw',
+          } as DocusaurusConfig,
+        } as Props,
       }),
     ).rejects.toThrowErrorMatchingSnapshot();
   });
File 2 of 2: the brokenLinks implementation.

@@ -52,8 +52,7 @@ function getPageBrokenLinks({
     // @ts-expect-error: React router types RouteConfig with an actual React
     // component, but we load route components with string paths.
     // We don't actually access component here, so it's fine.
-    .map((l) => matchRoutes(routes, l))
-    .flat();
+    .flatMap((l) => matchRoutes(routes, l));
   return matchedRoutes.length === 0;
 }
 
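The hunk above is a pure readability refactor: for a callback that returns an array, `xs.flatMap(f)` yields the same result as `xs.map(f).flat()`. A tiny self-contained sketch with made-up data:

    // Each "link" maps to an array of route matches (possibly empty), as with
    // react-router's matchRoutes; flattening yields one array of all matches.
    const links = ['/docs/intro', '/docs/missing'];
    const matchesFor = (link: string): string[] =>
      link === '/docs/missing' ? [] : [link];

    const viaMapFlat = links.map(matchesFor).flat();
    const viaFlatMap = links.flatMap(matchesFor);
    console.log(viaMapFlat, viaFlatMap); // ['/docs/intro'] ['/docs/intro'], identical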
@@ -78,10 +77,8 @@ function getAllBrokenLinks({
   allCollectedLinks: {[location: string]: string[]};
   routes: RouteConfig[];
 }): {[location: string]: BrokenLink[]} {
-  const filteredRoutes = filterIntermediateRoutes(routes);
-
   const allBrokenLinks = _.mapValues(allCollectedLinks, (pageLinks, pagePath) =>
-    getPageBrokenLinks({pageLinks, pagePath, routes: filteredRoutes}),
+    getPageBrokenLinks({pageLinks, pagePath, routes}),
   );
 
   return _.pickBy(allBrokenLinks, (brokenLinks) => brokenLinks.length > 0);
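For readers unfamiliar with the lodash pipeline in `getAllBrokenLinks`: each page's collected links are mapped to the subset that is broken, then pages with no broken links are dropped. A simplified sketch with made-up data and a stand-in `isBroken` predicate in place of the real route matching:

    import _ from 'lodash';

    const allCollectedLinks: {[pagePath: string]: string[]} = {
      '/docs/intro': ['/docs/installation', '/bad-link'],
      '/docs/faq': ['/docs/intro'],
    };
    // Stand-in for getPageBrokenLinks' route matching.
    const isBroken = (link: string) => link === '/bad-link';

    const allBrokenLinks = _.mapValues(allCollectedLinks, (pageLinks) =>
      pageLinks.filter(isBroken),
    );
    const onlyPagesWithBrokenLinks = _.pickBy(
      allBrokenLinks,
      (brokenLinks) => brokenLinks.length > 0,
    );
    console.log(onlyPagesWithBrokenLinks); // { '/docs/intro': [ '/bad-link' ] }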
@@ -217,24 +214,29 @@ async function filterExistingFileLinks({
 function findOrphanLinks({
   allCollectedLinks,
   orphanPages,
+  routes,
 }: {
   allCollectedLinks: {[location: string]: string[]};
   orphanPages: DocusaurusConfig['orphanPages'];
+  routes: RouteConfig[];
 }) {
   if (!orphanPages || orphanPages.onOrphanPage === 'ignore') {
     return;
   }
   const visited = new Set<string>();
   function dfs(link: string) {
-    if (visited.has(link)) {
+    // @ts-expect-error: see comment above
+    const normalLink = matchRoutes(routes, link)[0]?.match.path;
+    if (!normalLink || visited.has(normalLink)) {
       return;
     }
-    visited.add(link);
-    allCollectedLinks[link]?.forEach((l) => dfs(resolvePathname(l, link)));
+    visited.add(normalLink);
+    allCollectedLinks[normalLink]?.forEach((l) =>
+      dfs(resolvePathname(l, link)),
+    );
   }
   orphanPages.entryPoints.forEach(dfs);
-  const orphaned = new Set(Object.keys(allCollectedLinks));
-  visited.forEach((l) => orphaned.delete(l));
+  const orphaned = routes.map((r) => r.path).filter((l) => !visited.has(l));
   reportMessage(
     logger.interpolate`Orphan pages found: url=${Array.from(orphaned)}`,
     orphanPages.onOrphanPage,
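The reworked `findOrphanLinks` canonicalizes each traversed link to its matched route path before marking it visited, and reports as orphaned every declared route the traversal never reaches (previously it diffed the visited set against the keys of `allCollectedLinks`). A simplified, self-contained sketch of the traversal, in which plain string equality stands in for `matchRoutes` and `resolvePathname`:

    // Hedged sketch of the orphan-detection traversal, not the actual implementation.
    function findOrphans(
      allCollectedLinks: {[pagePath: string]: string[]},
      routePaths: string[],
      entryPoints: string[],
    ): string[] {
      // Normalize a raw link to the route path it matches, if any.
      const normalize = (link: string): string | undefined =>
        routePaths.find((routePath) => routePath === link);

      const visited = new Set<string>();
      function dfs(link: string) {
        const normalLink = normalize(link);
        if (!normalLink || visited.has(normalLink)) {
          return;
        }
        visited.add(normalLink);
        allCollectedLinks[normalLink]?.forEach(dfs);
      }
      entryPoints.forEach(dfs);

      // Orphans are declared routes that were never reached from an entry point.
      return routePaths.filter((routePath) => !visited.has(routePath));
    }

    console.log(
      findOrphans(
        {'/': ['/docs/intro'], '/docs/intro': []},
        ['/', '/docs/intro', '/unlinked'],
        ['/'],
      ),
    ); // ['/unlinked']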
@@ -244,7 +246,7 @@ function findOrphanLinks({
 export async function handleBrokenLinks({
   allCollectedLinks,
   props: {
-    routes,
+    routes: allRoutes,
     baseUrl,
     outDir,
     siteConfig: {onBrokenLinks, orphanPages},
@@ -253,7 +255,8 @@ export async function handleBrokenLinks({
   allCollectedLinks: {[location: string]: string[]};
   props: Props;
 }): Promise<void> {
-  findOrphanLinks({allCollectedLinks, orphanPages});
+  const routes = filterIntermediateRoutes(allRoutes);
+  findOrphanLinks({allCollectedLinks, orphanPages, routes});
 
   if (onBrokenLinks === 'ignore') {
     return;
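Taken together, the implementation changes move route filtering to a single place: `handleBrokenLinks` now calls `filterIntermediateRoutes` once on the incoming `allRoutes`, and both `findOrphanLinks` and the existing broken-link check receive the already-filtered `routes`, where previously `getAllBrokenLinks` filtered internally. Orphan detection is likewise computed over the declared routes rather than over the keys of `allCollectedLinks`.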