diff --git a/packages/docusaurus-utils/src/__tests__/dataFileUtils.test.ts b/packages/docusaurus-utils/src/__tests__/dataFileUtils.test.ts index 54f92601cc..f58a155e99 100644 --- a/packages/docusaurus-utils/src/__tests__/dataFileUtils.test.ts +++ b/packages/docusaurus-utils/src/__tests__/dataFileUtils.test.ts @@ -173,7 +173,7 @@ describe('findFolderContainingFile', () => { await expect( findFolderContainingFile( ['/abcdef', '/gehij', __dirname, '/klmn'], - 'index.test.ts', + 'dataFileUtils.test.ts', ), ).resolves.toEqual(__dirname); }); @@ -190,16 +190,19 @@ describe('getFolderContainingFile', () => { await expect( getFolderContainingFile( ['/abcdef', '/gehij', __dirname, '/klmn'], - 'index.test.ts', + 'dataFileUtils.test.ts', ), ).resolves.toEqual(__dirname); }); test('throw if no folder contain such file', async () => { await expect( - getFolderContainingFile(['/abcdef', '/gehij', '/klmn'], 'index.test.ts'), + getFolderContainingFile( + ['/abcdef', '/gehij', '/klmn'], + 'dataFileUtils.test.ts', + ), ).rejects.toThrowErrorMatchingInlineSnapshot(` - "File \\"index.test.ts\\" does not exist in any of these folders: + "File \\"dataFileUtils.test.ts\\" does not exist in any of these folders: - /abcdef - /gehij - /klmn]" diff --git a/packages/docusaurus-utils/src/__tests__/emitUtils.test.ts b/packages/docusaurus-utils/src/__tests__/emitUtils.test.ts new file mode 100644 index 0000000000..938bb5007d --- /dev/null +++ b/packages/docusaurus-utils/src/__tests__/emitUtils.test.ts @@ -0,0 +1,154 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import {genChunkName, readOutputHTMLFile, generate} from '../emitUtils'; +import path from 'path'; +import fs from 'fs-extra'; + +test('genChunkName', () => { + const firstAssert: Record = { + '/docs/adding-blog': 'docs-adding-blog-062', + '/docs/versioning': 'docs-versioning-8a8', + '/': 'index', + '/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus': + 'blog-2018-04-30-how-i-converted-profilo-to-docusaurus-4f2', + '/youtube': 'youtube-429', + '/users/en/': 'users-en-f7a', + '/blog': 'blog-c06', + }; + Object.keys(firstAssert).forEach((str) => { + expect(genChunkName(str)).toBe(firstAssert[str]); + }); + + // Don't allow different chunk name for same path. 
+ expect(genChunkName('path/is/similar', 'oldPrefix')).toEqual( + genChunkName('path/is/similar', 'newPrefix'), + ); + + // Even with same preferred name, still different chunk name for + // different path + const secondAssert: Record = { + '/blog/1': 'blog-85-f-089', + '/blog/2': 'blog-353-489', + }; + Object.keys(secondAssert).forEach((str) => { + expect(genChunkName(str, undefined, 'blog')).toBe(secondAssert[str]); + }); + + // Only generate short unique id + const thirdAssert: Record = { + a: '0cc175b9', + b: '92eb5ffe', + c: '4a8a08f0', + d: '8277e091', + }; + Object.keys(thirdAssert).forEach((str) => { + expect(genChunkName(str, undefined, undefined, true)).toBe( + thirdAssert[str], + ); + }); + expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091'); +}); + +describe('readOutputHTMLFile', () => { + test('trailing slash undefined', async () => { + await expect( + readOutputHTMLFile( + '/file', + path.join(__dirname, '__fixtures__/build-snap'), + undefined, + ).then(String), + ).resolves.toEqual('file\n'); + await expect( + readOutputHTMLFile( + '/folder', + path.join(__dirname, '__fixtures__/build-snap'), + undefined, + ).then(String), + ).resolves.toEqual('folder\n'); + await expect( + readOutputHTMLFile( + '/file/', + path.join(__dirname, '__fixtures__/build-snap'), + undefined, + ).then(String), + ).resolves.toEqual('file\n'); + await expect( + readOutputHTMLFile( + '/folder/', + path.join(__dirname, '__fixtures__/build-snap'), + undefined, + ).then(String), + ).resolves.toEqual('folder\n'); + }); + test('trailing slash true', async () => { + await expect( + readOutputHTMLFile( + '/folder', + path.join(__dirname, '__fixtures__/build-snap'), + true, + ).then(String), + ).resolves.toEqual('folder\n'); + await expect( + readOutputHTMLFile( + '/folder/', + path.join(__dirname, '__fixtures__/build-snap'), + true, + ).then(String), + ).resolves.toEqual('folder\n'); + }); + test('trailing slash false', async () => { + await expect( + readOutputHTMLFile( + '/file', + path.join(__dirname, '__fixtures__/build-snap'), + false, + ).then(String), + ).resolves.toEqual('file\n'); + await expect( + readOutputHTMLFile( + '/file/', + path.join(__dirname, '__fixtures__/build-snap'), + false, + ).then(String), + ).resolves.toEqual('file\n'); + }); +}); + +test('generate', async () => { + const writeMock = jest.spyOn(fs, 'writeFile').mockImplementation(() => {}); + const existsMock = jest.spyOn(fs, 'pathExists'); + const readMock = jest.spyOn(fs, 'readFile'); + + // First call: no file, no cache + existsMock.mockImplementationOnce(() => false); + await generate(__dirname, 'foo', 'bar'); + expect(writeMock).toHaveBeenNthCalledWith( + 1, + path.join(__dirname, 'foo'), + 'bar', + ); + + // Second call: cache exists + await generate(__dirname, 'foo', 'bar'); + expect(writeMock).toBeCalledTimes(1); + + // Generate another: file exists, cache doesn't + existsMock.mockImplementationOnce(() => true); + // @ts-expect-error: seems the typedef doesn't understand overload + readMock.mockImplementationOnce(() => Promise.resolve('bar')); + await generate(__dirname, 'baz', 'bar'); + expect(writeMock).toBeCalledTimes(1); + + // Generate again: force skip cache + await generate(__dirname, 'foo', 'bar', true); + expect(writeMock).toHaveBeenNthCalledWith( + 2, + path.join(__dirname, 'foo'), + 'bar', + ); +}); diff --git a/packages/docusaurus-utils/src/__tests__/i18nUtils.test.ts b/packages/docusaurus-utils/src/__tests__/i18nUtils.test.ts new file mode 100644 index 0000000000..39fc8aae7b --- /dev/null +++ 
b/packages/docusaurus-utils/src/__tests__/i18nUtils.test.ts @@ -0,0 +1,91 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import { + mergeTranslations, + updateTranslationFileMessages, + getPluginI18nPath, +} from '../i18nUtils'; + +test('mergeTranslations', () => { + expect( + mergeTranslations([ + { + T1: {message: 'T1 message', description: 'T1 desc'}, + T2: {message: 'T2 message', description: 'T2 desc'}, + T3: {message: 'T3 message', description: 'T3 desc'}, + }, + { + T4: {message: 'T4 message', description: 'T4 desc'}, + }, + {T2: {message: 'T2 message 2', description: 'T2 desc 2'}}, + ]), + ).toEqual({ + T1: {message: 'T1 message', description: 'T1 desc'}, + T2: {message: 'T2 message 2', description: 'T2 desc 2'}, + T3: {message: 'T3 message', description: 'T3 desc'}, + T4: {message: 'T4 message', description: 'T4 desc'}, + }); +}); + +test('updateTranslationFileMessages', () => { + expect( + updateTranslationFileMessages( + { + path: 'abc', + content: { + t1: {message: 't1 message', description: 't1 desc'}, + t2: {message: 't2 message', description: 't2 desc'}, + t3: {message: 't3 message', description: 't3 desc'}, + }, + }, + (message) => `prefix ${message} suffix`, + ), + ).toEqual({ + path: 'abc', + content: { + t1: {message: 'prefix t1 message suffix', description: 't1 desc'}, + t2: {message: 'prefix t2 message suffix', description: 't2 desc'}, + t3: {message: 'prefix t3 message suffix', description: 't3 desc'}, + }, + }); +}); + +describe('getPluginI18nPath', () => { + test('gets correct path', () => { + expect( + getPluginI18nPath({ + siteDir: __dirname, + locale: 'zh-Hans', + pluginName: 'plugin-content-docs', + pluginId: 'community', + subPaths: ['foo'], + }), + ).toMatchInlineSnapshot( + `"/packages/docusaurus-utils/src/__tests__/i18n/zh-Hans/plugin-content-docs-community/foo"`, + ); + }); + test('gets correct path for default plugin', () => { + expect( + getPluginI18nPath({ + siteDir: __dirname, + locale: 'zh-Hans', + pluginName: 'plugin-content-docs', + subPaths: ['foo'], + }).replace(__dirname, ''), + ).toMatchInlineSnapshot(`"/i18n/zh-Hans/plugin-content-docs/foo"`); + }); + test('gets correct path when no subpaths', () => { + expect( + getPluginI18nPath({ + siteDir: __dirname, + locale: 'zh-Hans', + pluginName: 'plugin-content-docs', + }).replace(__dirname, ''), + ).toMatchInlineSnapshot(`"/i18n/zh-Hans/plugin-content-docs"`); + }); +}); diff --git a/packages/docusaurus-utils/src/__tests__/index.test.ts b/packages/docusaurus-utils/src/__tests__/index.test.ts deleted file mode 100644 index 8dbc0bc117..0000000000 --- a/packages/docusaurus-utils/src/__tests__/index.test.ts +++ /dev/null @@ -1,529 +0,0 @@ -/** - * Copyright (c) Facebook, Inc. and its affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -import { - fileToPath, - genChunkName, - isValidPathname, - addTrailingSlash, - removeTrailingSlash, - removeSuffix, - removePrefix, - addLeadingSlash, - getElementsAround, - mergeTranslations, - mapAsyncSequential, - findAsyncSequential, - updateTranslationFileMessages, - encodePath, - addTrailingPathSeparator, - resolvePathname, - getPluginI18nPath, - generate, - reportMessage, - posixPath, - readOutputHTMLFile, -} from '../index'; -import _ from 'lodash'; -import fs from 'fs-extra'; -import path from 'path'; - -describe('load utils', () => { - test('fileToPath', () => { - const asserts: Record = { - 'index.md': '/', - 'hello/index.md': '/hello/', - 'foo.md': '/foo', - 'foo/bar.md': '/foo/bar', - 'index.js': '/', - 'hello/index.js': '/hello/', - 'foo.js': '/foo', - 'foo/bar.js': '/foo/bar', - }; - Object.keys(asserts).forEach((file) => { - expect(fileToPath(file)).toBe(asserts[file]); - }); - }); - - test('encodePath', () => { - expect(encodePath('a/foo/')).toEqual('a/foo/'); - expect(encodePath('a//')).toEqual('a/%3Cfoo%3E/'); - expect(encodePath('a/你好/')).toEqual('a/%E4%BD%A0%E5%A5%BD/'); - }); - - test('genChunkName', () => { - const firstAssert: Record = { - '/docs/adding-blog': 'docs-adding-blog-062', - '/docs/versioning': 'docs-versioning-8a8', - '/': 'index', - '/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus': - 'blog-2018-04-30-how-i-converted-profilo-to-docusaurus-4f2', - '/youtube': 'youtube-429', - '/users/en/': 'users-en-f7a', - '/blog': 'blog-c06', - }; - Object.keys(firstAssert).forEach((str) => { - expect(genChunkName(str)).toBe(firstAssert[str]); - }); - - // Don't allow different chunk name for same path. - expect(genChunkName('path/is/similar', 'oldPrefix')).toEqual( - genChunkName('path/is/similar', 'newPrefix'), - ); - - // Even with same preferred name, still different chunk name for - // different path - const secondAssert: Record = { - '/blog/1': 'blog-85-f-089', - '/blog/2': 'blog-353-489', - }; - Object.keys(secondAssert).forEach((str) => { - expect(genChunkName(str, undefined, 'blog')).toBe(secondAssert[str]); - }); - - // Only generate short unique id - const thirdAssert: Record = { - a: '0cc175b9', - b: '92eb5ffe', - c: '4a8a08f0', - d: '8277e091', - }; - Object.keys(thirdAssert).forEach((str) => { - expect(genChunkName(str, undefined, undefined, true)).toBe( - thirdAssert[str], - ); - }); - expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091'); - }); - - test('addTrailingPathSeparator', () => { - expect(addTrailingPathSeparator('foo')).toEqual( - process.platform === 'win32' ? 'foo\\' : 'foo/', - ); - expect(addTrailingPathSeparator('foo/')).toEqual( - process.platform === 'win32' ? 'foo\\' : 'foo/', - ); - }); - - test('resolvePathname', () => { - // These tests are directly copied from https://github.com/mjackson/resolve-pathname/blob/master/modules/__tests__/resolvePathname-test.js - // Maybe we want to wrap that logic in the future? 
- expect(resolvePathname('c')).toEqual('c'); - expect(resolvePathname('c', 'a/b')).toEqual('a/c'); - expect(resolvePathname('/c', '/a/b')).toEqual('/c'); - expect(resolvePathname('', '/a/b')).toEqual('/a/b'); - expect(resolvePathname('../c', '/a/b')).toEqual('/c'); - expect(resolvePathname('c', '/a/b')).toEqual('/a/c'); - expect(resolvePathname('c', '/a/')).toEqual('/a/c'); - expect(resolvePathname('..', '/a/b')).toEqual('/'); - }); - - test('isValidPathname', () => { - expect(isValidPathname('/')).toBe(true); - expect(isValidPathname('/hey')).toBe(true); - expect(isValidPathname('/hey/ho')).toBe(true); - expect(isValidPathname('/hey/ho/')).toBe(true); - expect(isValidPathname('/hey/h%C3%B4/')).toBe(true); - expect(isValidPathname('/hey///ho///')).toBe(true); // Unexpected but valid - expect(isValidPathname('/hey/héllô you')).toBe(true); - - expect(isValidPathname('')).toBe(false); - expect(isValidPathname('hey')).toBe(false); - expect(isValidPathname('/hey?qs=ho')).toBe(false); - expect(isValidPathname('https://fb.com/hey')).toBe(false); - expect(isValidPathname('//hey')).toBe(false); - expect(isValidPathname('////')).toBe(false); - }); -}); - -describe('generate', () => { - test('behaves correctly', async () => { - const writeMock = jest.spyOn(fs, 'writeFile').mockImplementation(() => {}); - const existsMock = jest.spyOn(fs, 'pathExists'); - const readMock = jest.spyOn(fs, 'readFile'); - - // First call: no file, no cache - existsMock.mockImplementationOnce(() => false); - await generate(__dirname, 'foo', 'bar'); - expect(writeMock).toHaveBeenNthCalledWith( - 1, - path.join(__dirname, 'foo'), - 'bar', - ); - - // Second call: cache exists - await generate(__dirname, 'foo', 'bar'); - expect(writeMock).toBeCalledTimes(1); - - // Generate another: file exists, cache doesn't - existsMock.mockImplementationOnce(() => true); - // @ts-expect-error: seems the typedef doesn't understand overload - readMock.mockImplementationOnce(() => Promise.resolve('bar')); - await generate(__dirname, 'baz', 'bar'); - expect(writeMock).toBeCalledTimes(1); - - // Generate again: force skip cache - await generate(__dirname, 'foo', 'bar', true); - expect(writeMock).toHaveBeenNthCalledWith( - 2, - path.join(__dirname, 'foo'), - 'bar', - ); - }); -}); - -describe('addTrailingSlash', () => { - test('should no-op', () => { - expect(addTrailingSlash('/abcd/')).toEqual('/abcd/'); - }); - test('should add /', () => { - expect(addTrailingSlash('/abcd')).toEqual('/abcd/'); - }); -}); - -describe('addLeadingSlash', () => { - test('should no-op', () => { - expect(addLeadingSlash('/abc')).toEqual('/abc'); - }); - test('should add /', () => { - expect(addLeadingSlash('abc')).toEqual('/abc'); - }); -}); - -describe('removeTrailingSlash', () => { - test('should no-op', () => { - expect(removeTrailingSlash('/abcd')).toEqual('/abcd'); - }); - test('should remove /', () => { - expect(removeTrailingSlash('/abcd/')).toEqual('/abcd'); - }); -}); - -describe('removeSuffix', () => { - test('should no-op 1', () => { - expect(removeSuffix('abcdef', 'ijk')).toEqual('abcdef'); - }); - test('should no-op 2', () => { - expect(removeSuffix('abcdef', 'abc')).toEqual('abcdef'); - }); - test('should no-op 3', () => { - expect(removeSuffix('abcdef', '')).toEqual('abcdef'); - }); - test('should remove suffix', () => { - expect(removeSuffix('abcdef', 'ef')).toEqual('abcd'); - }); -}); - -describe('removePrefix', () => { - test('should no-op 1', () => { - expect(removePrefix('abcdef', 'ijk')).toEqual('abcdef'); - }); - test('should no-op 2', () => { 
- expect(removePrefix('abcdef', 'def')).toEqual('abcdef'); - }); - test('should no-op 3', () => { - expect(removePrefix('abcdef', '')).toEqual('abcdef'); - }); - test('should remove prefix', () => { - expect(removePrefix('abcdef', 'ab')).toEqual('cdef'); - }); -}); - -describe('getElementsAround', () => { - test('can return elements around', () => { - expect(getElementsAround(['a', 'b', 'c', 'd'], 0)).toEqual({ - previous: undefined, - next: 'b', - }); - expect(getElementsAround(['a', 'b', 'c', 'd'], 1)).toEqual({ - previous: 'a', - next: 'c', - }); - expect(getElementsAround(['a', 'b', 'c', 'd'], 2)).toEqual({ - previous: 'b', - next: 'd', - }); - expect(getElementsAround(['a', 'b', 'c', 'd'], 3)).toEqual({ - previous: 'c', - next: undefined, - }); - }); - - test('throws if bad index is provided', () => { - expect(() => - getElementsAround(['a', 'b', 'c', 'd'], -1), - ).toThrowErrorMatchingInlineSnapshot( - `"Valid \\"aroundIndex\\" for array (of size 4) are between 0 and 3, but you provided -1."`, - ); - expect(() => - getElementsAround(['a', 'b', 'c', 'd'], 4), - ).toThrowErrorMatchingInlineSnapshot( - `"Valid \\"aroundIndex\\" for array (of size 4) are between 0 and 3, but you provided 4."`, - ); - }); -}); - -describe('mergeTranslations', () => { - test('should merge translations', () => { - expect( - mergeTranslations([ - { - T1: {message: 'T1 message', description: 'T1 desc'}, - T2: {message: 'T2 message', description: 'T2 desc'}, - T3: {message: 'T3 message', description: 'T3 desc'}, - }, - { - T4: {message: 'T4 message', description: 'T4 desc'}, - }, - {T2: {message: 'T2 message 2', description: 'T2 desc 2'}}, - ]), - ).toEqual({ - T1: {message: 'T1 message', description: 'T1 desc'}, - T2: {message: 'T2 message 2', description: 'T2 desc 2'}, - T3: {message: 'T3 message', description: 'T3 desc'}, - T4: {message: 'T4 message', description: 'T4 desc'}, - }); - }); -}); - -describe('mapAsyncSequential', () => { - function sleep(timeout: number): Promise { - return new Promise((resolve) => { - setTimeout(resolve, timeout); - }); - } - - test('map sequentially', async () => { - const itemToTimeout: Record = { - '1': 50, - '2': 150, - '3': 100, - }; - const items = Object.keys(itemToTimeout); - - const itemMapStartsAt: Record = {}; - const itemMapEndsAt: Record = {}; - - const timeBefore = Date.now(); - await expect( - mapAsyncSequential(items, async (item) => { - const itemTimeout = itemToTimeout[item]; - itemMapStartsAt[item] = Date.now(); - await sleep(itemTimeout); - itemMapEndsAt[item] = Date.now(); - return `${item} mapped`; - }), - ).resolves.toEqual(['1 mapped', '2 mapped', '3 mapped']); - const timeAfter = Date.now(); - - const timeTotal = timeAfter - timeBefore; - - const totalTimeouts = _.sum(Object.values(itemToTimeout)); - expect(timeTotal).toBeGreaterThanOrEqual(totalTimeouts - 20); - - expect(itemMapStartsAt['1']).toBeGreaterThanOrEqual(0); - expect(itemMapStartsAt['2']).toBeGreaterThanOrEqual( - itemMapEndsAt['1'] - 20, - ); - expect(itemMapStartsAt['3']).toBeGreaterThanOrEqual( - itemMapEndsAt['2'] - 20, - ); - }); -}); - -describe('findAsyncSequential', () => { - function sleep(timeout: number): Promise { - return new Promise((resolve) => { - setTimeout(resolve, timeout); - }); - } - - test('find sequentially', async () => { - const items = ['1', '2', '3']; - - const findFn = jest.fn(async (item: string) => { - await sleep(50); - return item === '2'; - }); - - const timeBefore = Date.now(); - await expect(findAsyncSequential(items, findFn)).resolves.toEqual('2'); - 
const timeAfter = Date.now(); - - expect(findFn).toHaveBeenCalledTimes(2); - expect(findFn).toHaveBeenNthCalledWith(1, '1'); - expect(findFn).toHaveBeenNthCalledWith(2, '2'); - - const timeTotal = timeAfter - timeBefore; - expect(timeTotal).toBeGreaterThanOrEqual(80); - expect(timeTotal).toBeLessThan(120); - }); -}); - -describe('readOutputHTMLFile', () => { - test('trailing slash undefined', async () => { - await expect( - readOutputHTMLFile( - '/file', - path.join(__dirname, '__fixtures__/build-snap'), - undefined, - ).then(String), - ).resolves.toEqual('file\n'); - await expect( - readOutputHTMLFile( - '/folder', - path.join(__dirname, '__fixtures__/build-snap'), - undefined, - ).then(String), - ).resolves.toEqual('folder\n'); - await expect( - readOutputHTMLFile( - '/file/', - path.join(__dirname, '__fixtures__/build-snap'), - undefined, - ).then(String), - ).resolves.toEqual('file\n'); - await expect( - readOutputHTMLFile( - '/folder/', - path.join(__dirname, '__fixtures__/build-snap'), - undefined, - ).then(String), - ).resolves.toEqual('folder\n'); - }); - test('trailing slash true', async () => { - await expect( - readOutputHTMLFile( - '/folder', - path.join(__dirname, '__fixtures__/build-snap'), - true, - ).then(String), - ).resolves.toEqual('folder\n'); - await expect( - readOutputHTMLFile( - '/folder/', - path.join(__dirname, '__fixtures__/build-snap'), - true, - ).then(String), - ).resolves.toEqual('folder\n'); - }); - test('trailing slash false', async () => { - await expect( - readOutputHTMLFile( - '/file', - path.join(__dirname, '__fixtures__/build-snap'), - false, - ).then(String), - ).resolves.toEqual('file\n'); - await expect( - readOutputHTMLFile( - '/file/', - path.join(__dirname, '__fixtures__/build-snap'), - false, - ).then(String), - ).resolves.toEqual('file\n'); - }); -}); - -describe('updateTranslationFileMessages', () => { - test('should update messages', () => { - expect( - updateTranslationFileMessages( - { - path: 'abc', - content: { - t1: {message: 't1 message', description: 't1 desc'}, - t2: {message: 't2 message', description: 't2 desc'}, - t3: {message: 't3 message', description: 't3 desc'}, - }, - }, - (message) => `prefix ${message} suffix`, - ), - ).toEqual({ - path: 'abc', - content: { - t1: {message: 'prefix t1 message suffix', description: 't1 desc'}, - t2: {message: 'prefix t2 message suffix', description: 't2 desc'}, - t3: {message: 'prefix t3 message suffix', description: 't3 desc'}, - }, - }); - }); -}); - -describe('getPluginI18nPath', () => { - test('gets correct path', () => { - expect( - posixPath( - getPluginI18nPath({ - siteDir: __dirname, - locale: 'zh-Hans', - pluginName: 'plugin-content-docs', - pluginId: 'community', - subPaths: ['foo'], - }).replace(__dirname, ''), - ), - ).toEqual('/i18n/zh-Hans/plugin-content-docs-community/foo'); - }); - test('gets correct path for default plugin', () => { - expect( - posixPath( - getPluginI18nPath({ - siteDir: __dirname, - locale: 'zh-Hans', - pluginName: 'plugin-content-docs', - subPaths: ['foo'], - }).replace(__dirname, ''), - ), - ).toEqual('/i18n/zh-Hans/plugin-content-docs/foo'); - }); - test('gets correct path when no subpaths', () => { - expect( - posixPath( - getPluginI18nPath({ - siteDir: __dirname, - locale: 'zh-Hans', - pluginName: 'plugin-content-docs', - }).replace(__dirname, ''), - ), - ).toEqual('/i18n/zh-Hans/plugin-content-docs'); - }); -}); - -describe('reportMessage', () => { - test('all severities', () => { - const consoleLog = jest.spyOn(console, 'info').mockImplementation(() => 
{}); - const consoleWarn = jest - .spyOn(console, 'warn') - .mockImplementation(() => {}); - const consoleError = jest - .spyOn(console, 'error') - .mockImplementation(() => {}); - reportMessage('hey', 'ignore'); - reportMessage('hey', 'log'); - reportMessage('hey', 'warn'); - reportMessage('hey', 'error'); - expect(() => - reportMessage('hey', 'throw'), - ).toThrowErrorMatchingInlineSnapshot(`"hey"`); - expect(() => - // @ts-expect-error: for test - reportMessage('hey', 'foo'), - ).toThrowErrorMatchingInlineSnapshot( - `"Unexpected \\"reportingSeverity\\" value: foo."`, - ); - expect(consoleLog).toBeCalledTimes(1); - expect(consoleLog).toBeCalledWith(expect.stringMatching(/.*\[INFO].* hey/)); - expect(consoleWarn).toBeCalledTimes(1); - expect(consoleWarn).toBeCalledWith( - expect.stringMatching(/.*\[WARNING].* hey/), - ); - expect(consoleError).toBeCalledTimes(1); - expect(consoleError).toBeCalledWith( - expect.stringMatching(/.*\[ERROR].* hey/), - ); - }); -}); diff --git a/packages/docusaurus-utils/src/__tests__/jsUtils.test.ts b/packages/docusaurus-utils/src/__tests__/jsUtils.test.ts new file mode 100644 index 0000000000..53bc48469a --- /dev/null +++ b/packages/docusaurus-utils/src/__tests__/jsUtils.test.ts @@ -0,0 +1,189 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import { + removeSuffix, + removePrefix, + getElementsAround, + mapAsyncSequential, + findAsyncSequential, + reportMessage, +} from '../jsUtils'; +import _ from 'lodash'; + +describe('removeSuffix', () => { + test('should no-op 1', () => { + expect(removeSuffix('abcdef', 'ijk')).toEqual('abcdef'); + }); + test('should no-op 2', () => { + expect(removeSuffix('abcdef', 'abc')).toEqual('abcdef'); + }); + test('should no-op 3', () => { + expect(removeSuffix('abcdef', '')).toEqual('abcdef'); + }); + test('should remove suffix', () => { + expect(removeSuffix('abcdef', 'ef')).toEqual('abcd'); + }); +}); + +describe('removePrefix', () => { + test('should no-op 1', () => { + expect(removePrefix('abcdef', 'ijk')).toEqual('abcdef'); + }); + test('should no-op 2', () => { + expect(removePrefix('abcdef', 'def')).toEqual('abcdef'); + }); + test('should no-op 3', () => { + expect(removePrefix('abcdef', '')).toEqual('abcdef'); + }); + test('should remove prefix', () => { + expect(removePrefix('abcdef', 'ab')).toEqual('cdef'); + }); +}); + +describe('getElementsAround', () => { + test('can return elements around', () => { + expect(getElementsAround(['a', 'b', 'c', 'd'], 0)).toEqual({ + previous: undefined, + next: 'b', + }); + expect(getElementsAround(['a', 'b', 'c', 'd'], 1)).toEqual({ + previous: 'a', + next: 'c', + }); + expect(getElementsAround(['a', 'b', 'c', 'd'], 2)).toEqual({ + previous: 'b', + next: 'd', + }); + expect(getElementsAround(['a', 'b', 'c', 'd'], 3)).toEqual({ + previous: 'c', + next: undefined, + }); + }); + + test('throws if bad index is provided', () => { + expect(() => + getElementsAround(['a', 'b', 'c', 'd'], -1), + ).toThrowErrorMatchingInlineSnapshot( + `"Valid \\"aroundIndex\\" for array (of size 4) are between 0 and 3, but you provided -1."`, + ); + expect(() => + getElementsAround(['a', 'b', 'c', 'd'], 4), + ).toThrowErrorMatchingInlineSnapshot( + `"Valid \\"aroundIndex\\" for array (of size 4) are between 0 and 3, but you provided 4."`, + ); + }); +}); + +describe('mapAsyncSequential', () => { + function sleep(timeout: number): Promise { + return new 
Promise((resolve) => { + setTimeout(resolve, timeout); + }); + } + + test('map sequentially', async () => { + const itemToTimeout: Record = { + '1': 50, + '2': 150, + '3': 100, + }; + const items = Object.keys(itemToTimeout); + + const itemMapStartsAt: Record = {}; + const itemMapEndsAt: Record = {}; + + const timeBefore = Date.now(); + await expect( + mapAsyncSequential(items, async (item) => { + const itemTimeout = itemToTimeout[item]; + itemMapStartsAt[item] = Date.now(); + await sleep(itemTimeout); + itemMapEndsAt[item] = Date.now(); + return `${item} mapped`; + }), + ).resolves.toEqual(['1 mapped', '2 mapped', '3 mapped']); + const timeAfter = Date.now(); + + const timeTotal = timeAfter - timeBefore; + + const totalTimeouts = _.sum(Object.values(itemToTimeout)); + expect(timeTotal).toBeGreaterThanOrEqual(totalTimeouts - 20); + + expect(itemMapStartsAt['1']).toBeGreaterThanOrEqual(0); + expect(itemMapStartsAt['2']).toBeGreaterThanOrEqual( + itemMapEndsAt['1'] - 20, + ); + expect(itemMapStartsAt['3']).toBeGreaterThanOrEqual( + itemMapEndsAt['2'] - 20, + ); + }); +}); + +describe('findAsyncSequential', () => { + function sleep(timeout: number): Promise { + return new Promise((resolve) => { + setTimeout(resolve, timeout); + }); + } + + test('find sequentially', async () => { + const items = ['1', '2', '3']; + + const findFn = jest.fn(async (item: string) => { + await sleep(50); + return item === '2'; + }); + + const timeBefore = Date.now(); + await expect(findAsyncSequential(items, findFn)).resolves.toEqual('2'); + const timeAfter = Date.now(); + + expect(findFn).toHaveBeenCalledTimes(2); + expect(findFn).toHaveBeenNthCalledWith(1, '1'); + expect(findFn).toHaveBeenNthCalledWith(2, '2'); + + const timeTotal = timeAfter - timeBefore; + expect(timeTotal).toBeGreaterThanOrEqual(80); + expect(timeTotal).toBeLessThan(120); + }); +}); + +describe('reportMessage', () => { + test('all severities', () => { + const consoleLog = jest.spyOn(console, 'info').mockImplementation(() => {}); + const consoleWarn = jest + .spyOn(console, 'warn') + .mockImplementation(() => {}); + const consoleError = jest + .spyOn(console, 'error') + .mockImplementation(() => {}); + reportMessage('hey', 'ignore'); + reportMessage('hey', 'log'); + reportMessage('hey', 'warn'); + reportMessage('hey', 'error'); + expect(() => + reportMessage('hey', 'throw'), + ).toThrowErrorMatchingInlineSnapshot(`"hey"`); + expect(() => + // @ts-expect-error: for test + reportMessage('hey', 'foo'), + ).toThrowErrorMatchingInlineSnapshot( + `"Unexpected \\"reportingSeverity\\" value: foo."`, + ); + expect(consoleLog).toBeCalledTimes(1); + expect(consoleLog).toBeCalledWith(expect.stringMatching(/.*\[INFO].* hey/)); + expect(consoleWarn).toBeCalledTimes(1); + expect(consoleWarn).toBeCalledWith( + expect.stringMatching(/.*\[WARNING].* hey/), + ); + expect(consoleError).toBeCalledTimes(1); + expect(consoleError).toBeCalledWith( + expect.stringMatching(/.*\[ERROR].* hey/), + ); + }); +}); diff --git a/packages/docusaurus-utils/src/__tests__/pathUtils.test.ts b/packages/docusaurus-utils/src/__tests__/pathUtils.test.ts index 0c3666d137..96089fa13a 100644 --- a/packages/docusaurus-utils/src/__tests__/pathUtils.test.ts +++ b/packages/docusaurus-utils/src/__tests__/pathUtils.test.ts @@ -12,6 +12,7 @@ import { posixPath, aliasedSitePath, toMessageRelativeFilePath, + addTrailingPathSeparator, } from '../pathUtils'; import path from 'path'; @@ -116,63 +117,63 @@ describe('shortName', () => { }); }); -describe('toMessageRelativeFilePath', () => { - 
test('behaves correctly', () => { - jest - .spyOn(process, 'cwd') - .mockImplementationOnce(() => path.join(__dirname, '..')); - expect( - toMessageRelativeFilePath(path.join(__dirname, 'foo/bar.js')), - ).toEqual('__tests__/foo/bar.js'); +test('toMessageRelativeFilePath', () => { + jest + .spyOn(process, 'cwd') + .mockImplementationOnce(() => path.join(__dirname, '..')); + expect(toMessageRelativeFilePath(path.join(__dirname, 'foo/bar.js'))).toEqual( + '__tests__/foo/bar.js', + ); +}); + +test('escapePath', () => { + const asserts: Record = { + 'c:/aaaa\\bbbb': 'c:/aaaa\\\\bbbb', + 'c:\\aaaa\\bbbb\\★': 'c:\\\\aaaa\\\\bbbb\\\\★', + '\\\\?\\c:\\aaaa\\bbbb': '\\\\\\\\?\\\\c:\\\\aaaa\\\\bbbb', + 'c:\\aaaa\\bbbb': 'c:\\\\aaaa\\\\bbbb', + 'foo\\bar': 'foo\\\\bar', + 'foo\\bar/lol': 'foo\\\\bar/lol', + 'website\\docs/**/*.{md,mdx}': 'website\\\\docs/**/*.{md,mdx}', + }; + Object.keys(asserts).forEach((file) => { + expect(escapePath(file)).toBe(asserts[file]); }); }); -describe('escapePath', () => { - test('escapePath works', () => { - const asserts: Record = { - 'c:/aaaa\\bbbb': 'c:/aaaa\\\\bbbb', - 'c:\\aaaa\\bbbb\\★': 'c:\\\\aaaa\\\\bbbb\\\\★', - '\\\\?\\c:\\aaaa\\bbbb': '\\\\\\\\?\\\\c:\\\\aaaa\\\\bbbb', - 'c:\\aaaa\\bbbb': 'c:\\\\aaaa\\\\bbbb', - 'foo\\bar': 'foo\\\\bar', - 'foo\\bar/lol': 'foo\\\\bar/lol', - 'website\\docs/**/*.{md,mdx}': 'website\\\\docs/**/*.{md,mdx}', - }; - Object.keys(asserts).forEach((file) => { - expect(escapePath(file)).toBe(asserts[file]); - }); +test('posixPath', () => { + const asserts: Record = { + 'c:/aaaa\\bbbb': 'c:/aaaa/bbbb', + 'c:\\aaaa\\bbbb\\★': 'c:\\aaaa\\bbbb\\★', + '\\\\?\\c:\\aaaa\\bbbb': '\\\\?\\c:\\aaaa\\bbbb', + 'c:\\aaaa\\bbbb': 'c:/aaaa/bbbb', + 'foo\\bar': 'foo/bar', + 'foo\\bar/lol': 'foo/bar/lol', + 'website\\docs/**/*.{md,mdx}': 'website/docs/**/*.{md,mdx}', + }; + Object.keys(asserts).forEach((file) => { + expect(posixPath(file)).toBe(asserts[file]); }); }); -describe('posixPath', () => { - test('posixPath works', () => { - const asserts: Record = { - 'c:/aaaa\\bbbb': 'c:/aaaa/bbbb', - 'c:\\aaaa\\bbbb\\★': 'c:\\aaaa\\bbbb\\★', - '\\\\?\\c:\\aaaa\\bbbb': '\\\\?\\c:\\aaaa\\bbbb', - 'c:\\aaaa\\bbbb': 'c:/aaaa/bbbb', - 'foo\\bar': 'foo/bar', - 'foo\\bar/lol': 'foo/bar/lol', - 'website\\docs/**/*.{md,mdx}': 'website/docs/**/*.{md,mdx}', - }; - Object.keys(asserts).forEach((file) => { - expect(posixPath(file)).toBe(asserts[file]); - }); +test('aliasedSitePath', () => { + const asserts: Record = { + 'user/website/docs/asd.md': '@site/docs/asd.md', + 'user/website/versioned_docs/foo/bar.md': '@site/versioned_docs/foo/bar.md', + 'user/docs/test.md': '@site/../docs/test.md', + }; + Object.keys(asserts).forEach((file) => { + expect(posixPath(aliasedSitePath(file, 'user/website'))).toBe( + asserts[file], + ); }); }); -describe('aliasedSitePath', () => { - test('behaves correctly', () => { - const asserts: Record = { - 'user/website/docs/asd.md': '@site/docs/asd.md', - 'user/website/versioned_docs/foo/bar.md': - '@site/versioned_docs/foo/bar.md', - 'user/docs/test.md': '@site/../docs/test.md', - }; - Object.keys(asserts).forEach((file) => { - expect(posixPath(aliasedSitePath(file, 'user/website'))).toBe( - asserts[file], - ); - }); - }); +test('addTrailingPathSeparator', () => { + expect(addTrailingPathSeparator('foo')).toEqual( + process.platform === 'win32' ? 'foo\\' : 'foo/', + ); + expect(addTrailingPathSeparator('foo/')).toEqual( + process.platform === 'win32' ? 
'foo\\' : 'foo/', + ); }); diff --git a/packages/docusaurus-utils/src/__tests__/urlUtils.test.ts b/packages/docusaurus-utils/src/__tests__/urlUtils.test.ts index 3acd892bbe..22410a8e76 100644 --- a/packages/docusaurus-utils/src/__tests__/urlUtils.test.ts +++ b/packages/docusaurus-utils/src/__tests__/urlUtils.test.ts @@ -5,7 +5,17 @@ * LICENSE file in the root directory of this source tree. */ -import {normalizeUrl, getEditUrl} from '../urlUtils'; +import { + normalizeUrl, + getEditUrl, + fileToPath, + isValidPathname, + addTrailingSlash, + addLeadingSlash, + removeTrailingSlash, + resolvePathname, + encodePath, +} from '../urlUtils'; describe('normalizeUrl', () => { test('should normalize urls correctly', () => { @@ -150,3 +160,82 @@ describe('getEditUrl', () => { expect(getEditUrl('foo/bar.md')).toBeUndefined(); }); }); + +test('fileToPath', () => { + const asserts: Record = { + 'index.md': '/', + 'hello/index.md': '/hello/', + 'foo.md': '/foo', + 'foo/bar.md': '/foo/bar', + 'index.js': '/', + 'hello/index.js': '/hello/', + 'foo.js': '/foo', + 'foo/bar.js': '/foo/bar', + }; + Object.keys(asserts).forEach((file) => { + expect(fileToPath(file)).toBe(asserts[file]); + }); +}); + +test('isValidPathname', () => { + expect(isValidPathname('/')).toBe(true); + expect(isValidPathname('/hey')).toBe(true); + expect(isValidPathname('/hey/ho')).toBe(true); + expect(isValidPathname('/hey/ho/')).toBe(true); + expect(isValidPathname('/hey/h%C3%B4/')).toBe(true); + expect(isValidPathname('/hey///ho///')).toBe(true); // Unexpected but valid + expect(isValidPathname('/hey/héllô you')).toBe(true); + + expect(isValidPathname('')).toBe(false); + expect(isValidPathname('hey')).toBe(false); + expect(isValidPathname('/hey?qs=ho')).toBe(false); + expect(isValidPathname('https://fb.com/hey')).toBe(false); + expect(isValidPathname('//hey')).toBe(false); + expect(isValidPathname('////')).toBe(false); +}); + +describe('addTrailingSlash', () => { + test('should no-op', () => { + expect(addTrailingSlash('/abcd/')).toEqual('/abcd/'); + }); + test('should add /', () => { + expect(addTrailingSlash('/abcd')).toEqual('/abcd/'); + }); +}); + +describe('addLeadingSlash', () => { + test('should no-op', () => { + expect(addLeadingSlash('/abc')).toEqual('/abc'); + }); + test('should add /', () => { + expect(addLeadingSlash('abc')).toEqual('/abc'); + }); +}); + +describe('removeTrailingSlash', () => { + test('should no-op', () => { + expect(removeTrailingSlash('/abcd')).toEqual('/abcd'); + }); + test('should remove /', () => { + expect(removeTrailingSlash('/abcd/')).toEqual('/abcd'); + }); +}); + +test('resolvePathname', () => { + // These tests are directly copied from https://github.com/mjackson/resolve-pathname/blob/master/modules/__tests__/resolvePathname-test.js + // Maybe we want to wrap that logic in the future? 
+  expect(resolvePathname('c')).toEqual('c');
+  expect(resolvePathname('c', 'a/b')).toEqual('a/c');
+  expect(resolvePathname('/c', '/a/b')).toEqual('/c');
+  expect(resolvePathname('', '/a/b')).toEqual('/a/b');
+  expect(resolvePathname('../c', '/a/b')).toEqual('/c');
+  expect(resolvePathname('c', '/a/b')).toEqual('/a/c');
+  expect(resolvePathname('c', '/a/')).toEqual('/a/c');
+  expect(resolvePathname('..', '/a/b')).toEqual('/');
+});
+
+test('encodePath', () => {
+  expect(encodePath('a/foo/')).toEqual('a/foo/');
+  expect(encodePath('a/<foo>/')).toEqual('a/%3Cfoo%3E/');
+  expect(encodePath('a/你好/')).toEqual('a/%E4%BD%A0%E5%A5%BD/');
+});
diff --git a/packages/docusaurus-utils/src/emitUtils.ts b/packages/docusaurus-utils/src/emitUtils.ts
new file mode 100644
index 0000000000..bb71a99f48
--- /dev/null
+++ b/packages/docusaurus-utils/src/emitUtils.ts
@@ -0,0 +1,113 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import path from 'path';
+import fs from 'fs-extra';
+import {createHash} from 'crypto';
+import {simpleHash, docuHash} from './hashUtils';
+import {findAsyncSequential} from './jsUtils';
+
+const fileHash = new Map<string, string>();
+
+export async function generate(
+  generatedFilesDir: string,
+  file: string,
+  content: string,
+  skipCache: boolean = process.env.NODE_ENV === 'production',
+): Promise<void> {
+  const filepath = path.join(generatedFilesDir, file);
+
+  if (skipCache) {
+    await fs.ensureDir(path.dirname(filepath));
+    await fs.writeFile(filepath, content);
+    return;
+  }
+
+  let lastHash = fileHash.get(filepath);
+
+  // If the file already exists but is not in the runtime cache yet,
+  // calculate the content hash and compare it. This avoids unnecessary
+  // overwriting, so the old file can be reused.
+  if (!lastHash && (await fs.pathExists(filepath))) {
+    const lastContent = await fs.readFile(filepath, 'utf8');
+    lastHash = createHash('md5').update(lastContent).digest('hex');
+    fileHash.set(filepath, lastHash);
+  }
+
+  const currentHash = createHash('md5').update(content).digest('hex');
+
+  if (lastHash !== currentHash) {
+    await fs.ensureDir(path.dirname(filepath));
+    await fs.writeFile(filepath, content);
+    fileHash.set(filepath, currentHash);
+  }
+}
+
+const chunkNameCache = new Map<string, string>();
+
+/**
+ * Generate a unique chunk name given a module path.
+ */
+export function genChunkName(
+  modulePath: string,
+  prefix?: string,
+  preferredName?: string,
+  shortId: boolean = process.env.NODE_ENV === 'production',
+): string {
+  let chunkName: string | undefined = chunkNameCache.get(modulePath);
+  if (!chunkName) {
+    if (shortId) {
+      chunkName = simpleHash(modulePath, 8);
+    } else {
+      let str = modulePath;
+      if (preferredName) {
+        const shortHash = simpleHash(modulePath, 3);
+        str = `${preferredName}${shortHash}`;
+      }
+      const name = str === '/' ? 'index' : docuHash(str);
+      chunkName = prefix ? `${prefix}---${name}` : name;
+    }
+    chunkNameCache.set(modulePath, chunkName);
+  }
+  return chunkName;
+}
+
+/**
+ * @param permalink The URL that the HTML file corresponds to, without base URL
+ * @param outDir Full path to the output directory
+ * @param trailingSlash The site config option. If provided, only one path will
+ * be read.
+ * @returns A Buffer, which you have to decode into a string yourself if needed
+ * (not always necessary, since the output isn't for human consumption anyway,
+ * and most HTML manipulation libs accept buffers)
+ */
+export async function readOutputHTMLFile(
+  permalink: string,
+  outDir: string,
+  trailingSlash: boolean | undefined,
+): Promise<Buffer> {
+  const withTrailingSlashPath = path.join(outDir, permalink, 'index.html');
+  const withoutTrailingSlashPath = path.join(
+    outDir,
+    `${permalink.replace(/\/$/, '')}.html`,
+  );
+  if (trailingSlash) {
+    return fs.readFile(withTrailingSlashPath);
+  } else if (trailingSlash === false) {
+    return fs.readFile(withoutTrailingSlashPath);
+  }
+  const HTMLPath = await findAsyncSequential(
+    [withTrailingSlashPath, withoutTrailingSlashPath],
+    fs.pathExists,
+  );
+  if (!HTMLPath) {
+    throw new Error(
+      `Expected output HTML file to be found at ${withTrailingSlashPath}`,
+    );
+  }
+  return fs.readFile(HTMLPath);
+}
diff --git a/packages/docusaurus-utils/src/i18nUtils.ts b/packages/docusaurus-utils/src/i18nUtils.ts
new file mode 100644
index 0000000000..3e3005fcfe
--- /dev/null
+++ b/packages/docusaurus-utils/src/i18nUtils.ts
@@ -0,0 +1,58 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import path from 'path';
+import _ from 'lodash';
+import type {TranslationFileContent, TranslationFile} from '@docusaurus/types';
+import {DEFAULT_PLUGIN_ID} from './constants';
+
+export function mergeTranslations(
+  contents: TranslationFileContent[],
+): TranslationFileContent {
+  return contents.reduce((acc, content) => ({...acc, ...content}), {});
+}
+
+// Useful to update all the messages of a translation file.
+// Used in tests to simulate translations.
+export function updateTranslationFileMessages(
+  translationFile: TranslationFile,
+  updateMessage: (message: string) => string,
+): TranslationFile {
+  return {
+    ...translationFile,
+    content: _.mapValues(translationFile.content, (translation) => ({
+      ...translation,
+      message: updateMessage(translation.message),
+    })),
+  };
+}
+
+export function getPluginI18nPath({
+  siteDir,
+  locale,
+  pluginName,
+  pluginId = DEFAULT_PLUGIN_ID,
+  subPaths = [],
+}: {
+  siteDir: string;
+  locale: string;
+  pluginName: string;
+  pluginId?: string | undefined;
+  subPaths?: string[];
+}): string {
+  return path.join(
+    siteDir,
+    'i18n',
+    // Namespace first by locale: convenient for a translator to work in a
+    // single folder.
+    locale,
+    // Make it convenient to use for single-instance plugins,
+    // i.e. return "docs", not "docs-default" nor "docs/default".
+    `${pluginName}${pluginId === DEFAULT_PLUGIN_ID ? '' : `-${pluginId}`}`,
+    ...subPaths,
+  );
+}
diff --git a/packages/docusaurus-utils/src/index.ts b/packages/docusaurus-utils/src/index.ts
index fdb3f6087d..9c66f3a9f4 100644
--- a/packages/docusaurus-utils/src/index.ts
+++ b/packages/docusaurus-utils/src/index.ts
@@ -5,23 +5,6 @@
  * LICENSE file in the root directory of this source tree.
*/ -import logger from '@docusaurus/logger'; -import path from 'path'; -import {createHash} from 'crypto'; -import _ from 'lodash'; -import fs from 'fs-extra'; -import {URL} from 'url'; -import type { - ReportingSeverity, - TranslationFileContent, - TranslationFile, -} from '@docusaurus/types'; - -import resolvePathnameUnsafe from 'resolve-pathname'; - -import {simpleHash, docuHash} from './hashUtils'; -import {DEFAULT_PLUGIN_ID} from './constants'; - export { NODE_MAJOR_VERSION, NODE_MINOR_VERSION, @@ -37,8 +20,32 @@ export { DEFAULT_PLUGIN_ID, WEBPACK_URL_LOADER_LIMIT, } from './constants'; +export {generate, genChunkName, readOutputHTMLFile} from './emitUtils'; export {getFileCommitDate, GitNotFoundError} from './gitUtils'; -export {normalizeUrl, getEditUrl} from './urlUtils'; +export { + mergeTranslations, + updateTranslationFileMessages, + getPluginI18nPath, +} from './i18nUtils'; +export { + removeSuffix, + removePrefix, + getElementsAround, + mapAsyncSequential, + findAsyncSequential, + reportMessage, +} from './jsUtils'; +export { + normalizeUrl, + getEditUrl, + fileToPath, + encodePath, + isValidPathname, + resolvePathname, + addLeadingSlash, + addTrailingSlash, + removeTrailingSlash, +} from './urlUtils'; export { type Tag, type FrontMatterTag, @@ -69,6 +76,7 @@ export { toMessageRelativeFilePath, aliasedSitePath, escapePath, + addTrailingPathSeparator, } from './pathUtils'; export {md5Hash, simpleHash, docuHash} from './hashUtils'; export { @@ -85,285 +93,3 @@ export { findFolderContainingFile, getFolderContainingFile, } from './dataFileUtils'; - -const fileHash = new Map(); -export async function generate( - generatedFilesDir: string, - file: string, - content: string, - skipCache: boolean = process.env.NODE_ENV === 'production', -): Promise { - const filepath = path.join(generatedFilesDir, file); - - if (skipCache) { - await fs.ensureDir(path.dirname(filepath)); - await fs.writeFile(filepath, content); - return; - } - - let lastHash = fileHash.get(filepath); - - // If file already exists but its not in runtime cache yet, - // we try to calculate the content hash and then compare - // This is to avoid unnecessary overwriting and we can reuse old file. - if (!lastHash && (await fs.pathExists(filepath))) { - const lastContent = await fs.readFile(filepath, 'utf8'); - lastHash = createHash('md5').update(lastContent).digest('hex'); - fileHash.set(filepath, lastHash); - } - - const currentHash = createHash('md5').update(content).digest('hex'); - - if (lastHash !== currentHash) { - await fs.ensureDir(path.dirname(filepath)); - await fs.writeFile(filepath, content); - fileHash.set(filepath, currentHash); - } -} - -const indexRE = /(?^|.*\/)index\.(?:mdx?|jsx?|tsx?)$/i; -const extRE = /\.(?:mdx?|jsx?|tsx?)$/; - -/** - * Convert filepath to url path. - * Example: 'index.md' -> '/', 'foo/bar.js' -> '/foo/bar', - */ -export function fileToPath(file: string): string { - if (indexRE.test(file)) { - return file.replace(indexRE, '/$1'); - } - return `/${file.replace(extRE, '').replace(/\\/g, '/')}`; -} - -export function encodePath(userPath: string): string { - return userPath - .split('/') - .map((item) => encodeURIComponent(item)) - .join('/'); -} - -const chunkNameCache = new Map(); -/** - * Generate unique chunk name given a module path. 
- */ -export function genChunkName( - modulePath: string, - prefix?: string, - preferredName?: string, - shortId: boolean = process.env.NODE_ENV === 'production', -): string { - let chunkName: string | undefined = chunkNameCache.get(modulePath); - if (!chunkName) { - if (shortId) { - chunkName = simpleHash(modulePath, 8); - } else { - let str = modulePath; - if (preferredName) { - const shortHash = simpleHash(modulePath, 3); - str = `${preferredName}${shortHash}`; - } - const name = str === '/' ? 'index' : docuHash(str); - chunkName = prefix ? `${prefix}---${name}` : name; - } - chunkNameCache.set(modulePath, chunkName); - } - return chunkName; -} - -export function isValidPathname(str: string): boolean { - if (!str.startsWith('/')) { - return false; - } - try { - // weird, but is there a better way? - const parsedPathname = new URL(str, 'https://domain.com').pathname; - return parsedPathname === str || parsedPathname === encodeURI(str); - } catch { - return false; - } -} - -// resolve pathname and fail fast if resolution fails -export function resolvePathname(to: string, from?: string): string { - return resolvePathnameUnsafe(to, from); -} -export function addLeadingSlash(str: string): string { - return str.startsWith('/') ? str : `/${str}`; -} - -export function addTrailingPathSeparator(str: string): string { - return str.endsWith(path.sep) - ? str - : // If this is Windows, we need to change the forward slash to backward - `${str.replace(/\/$/, '')}${path.sep}`; -} - -// TODO deduplicate: also present in @docusaurus/utils-common -export function addTrailingSlash(str: string): string { - return str.endsWith('/') ? str : `${str}/`; -} -export function removeTrailingSlash(str: string): string { - return removeSuffix(str, '/'); -} - -export function removeSuffix(str: string, suffix: string): string { - if (suffix === '') { - return str; // always returns "" otherwise! - } - return str.endsWith(suffix) ? str.slice(0, -suffix.length) : str; -} - -export function removePrefix(str: string, prefix: string): string { - return str.startsWith(prefix) ? str.slice(prefix.length) : str; -} - -export function getElementsAround( - array: T[], - aroundIndex: number, -): { - next: T | undefined; - previous: T | undefined; -} { - const min = 0; - const max = array.length - 1; - if (aroundIndex < min || aroundIndex > max) { - throw new Error( - `Valid "aroundIndex" for array (of size ${array.length}) are between ${min} and ${max}, but you provided ${aroundIndex}.`, - ); - } - const previous = aroundIndex === min ? undefined : array[aroundIndex - 1]; - const next = aroundIndex === max ? undefined : array[aroundIndex + 1]; - return {previous, next}; -} - -export function getPluginI18nPath({ - siteDir, - locale, - pluginName, - pluginId = DEFAULT_PLUGIN_ID, - subPaths = [], -}: { - siteDir: string; - locale: string; - pluginName: string; - pluginId?: string | undefined; - subPaths?: string[]; -}): string { - return path.join( - siteDir, - 'i18n', - // namespace first by locale: convenient to work in a single folder for a - // translator - locale, - // Make it convenient to use for single-instance - // ie: return "docs", not "docs-default" nor "docs/default" - `${pluginName}${pluginId === DEFAULT_PLUGIN_ID ? '' : `-${pluginId}`}`, - ...subPaths, - ); -} - -/** - * @param permalink The URL that the HTML file corresponds to, without base URL - * @param outDir Full path to the output directory - * @param trailingSlash The site config option. If provided, only one path will - * be read. 
- * @returns This returns a buffer, which you have to decode string yourself if - * needed. (Not always necessary since the output isn't for human consumption - * anyways, and most HTML manipulation libs accept buffers) - */ -export async function readOutputHTMLFile( - permalink: string, - outDir: string, - trailingSlash: boolean | undefined, -): Promise { - const withTrailingSlashPath = path.join(outDir, permalink, 'index.html'); - const withoutTrailingSlashPath = path.join( - outDir, - `${permalink.replace(/\/$/, '')}.html`, - ); - if (trailingSlash) { - return fs.readFile(withTrailingSlashPath); - } else if (trailingSlash === false) { - return fs.readFile(withoutTrailingSlashPath); - } - const HTMLPath = await findAsyncSequential( - [withTrailingSlashPath, withoutTrailingSlashPath], - fs.pathExists, - ); - if (!HTMLPath) { - throw new Error( - `Expected output HTML file to be found at ${withTrailingSlashPath}`, - ); - } - return fs.readFile(HTMLPath); -} - -export async function mapAsyncSequential( - array: T[], - action: (t: T) => Promise, -): Promise { - const results: R[] = []; - for (const t of array) { - const result = await action(t); - results.push(result); - } - return results; -} - -export async function findAsyncSequential( - array: T[], - predicate: (t: T) => Promise, -): Promise { - for (const t of array) { - if (await predicate(t)) { - return t; - } - } - return undefined; -} - -export function reportMessage( - message: string, - reportingSeverity: ReportingSeverity, -): void { - switch (reportingSeverity) { - case 'ignore': - break; - case 'log': - logger.info(message); - break; - case 'warn': - logger.warn(message); - break; - case 'error': - logger.error(message); - break; - case 'throw': - throw new Error(message); - default: - throw new Error( - `Unexpected "reportingSeverity" value: ${reportingSeverity}.`, - ); - } -} - -export function mergeTranslations( - contents: TranslationFileContent[], -): TranslationFileContent { - return contents.reduce((acc, content) => ({...acc, ...content}), {}); -} - -// Useful to update all the messages of a translation file -// Used in tests to simulate translations -export function updateTranslationFileMessages( - translationFile: TranslationFile, - updateMessage: (message: string) => string, -): TranslationFile { - return { - ...translationFile, - content: _.mapValues(translationFile.content, (translation) => ({ - ...translation, - message: updateMessage(translation.message), - })), - }; -} diff --git a/packages/docusaurus-utils/src/jsUtils.ts b/packages/docusaurus-utils/src/jsUtils.ts new file mode 100644 index 0000000000..a0db09b3d1 --- /dev/null +++ b/packages/docusaurus-utils/src/jsUtils.ts @@ -0,0 +1,88 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type {ReportingSeverity} from '@docusaurus/types'; +import logger from '@docusaurus/logger'; + +export function removeSuffix(str: string, suffix: string): string { + if (suffix === '') { + return str; // always returns "" otherwise! + } + return str.endsWith(suffix) ? str.slice(0, -suffix.length) : str; +} + +export function removePrefix(str: string, prefix: string): string { + return str.startsWith(prefix) ? 
str.slice(prefix.length) : str;
+}
+
+export function getElementsAround<T>(
+  array: T[],
+  aroundIndex: number,
+): {
+  next: T | undefined;
+  previous: T | undefined;
+} {
+  const min = 0;
+  const max = array.length - 1;
+  if (aroundIndex < min || aroundIndex > max) {
+    throw new Error(
+      `Valid "aroundIndex" for array (of size ${array.length}) are between ${min} and ${max}, but you provided ${aroundIndex}.`,
+    );
+  }
+  const previous = aroundIndex === min ? undefined : array[aroundIndex - 1];
+  const next = aroundIndex === max ? undefined : array[aroundIndex + 1];
+  return {previous, next};
+}
+
+export async function mapAsyncSequential<T, R>(
+  array: T[],
+  action: (t: T) => Promise<R>,
+): Promise<R[]> {
+  const results: R[] = [];
+  for (const t of array) {
+    const result = await action(t);
+    results.push(result);
+  }
+  return results;
+}
+
+export async function findAsyncSequential<T>(
+  array: T[],
+  predicate: (t: T) => Promise<boolean>,
+): Promise<T | undefined> {
+  for (const t of array) {
+    if (await predicate(t)) {
+      return t;
+    }
+  }
+  return undefined;
+}
+
+export function reportMessage(
+  message: string,
+  reportingSeverity: ReportingSeverity,
+): void {
+  switch (reportingSeverity) {
+    case 'ignore':
+      break;
+    case 'log':
+      logger.info(message);
+      break;
+    case 'warn':
+      logger.warn(message);
+      break;
+    case 'error':
+      logger.error(message);
+      break;
+    case 'throw':
+      throw new Error(message);
+    default:
+      throw new Error(
+        `Unexpected "reportingSeverity" value: ${reportingSeverity}.`,
+      );
+  }
+}
diff --git a/packages/docusaurus-utils/src/pathUtils.ts b/packages/docusaurus-utils/src/pathUtils.ts
index 384b73a2be..db005eab56 100644
--- a/packages/docusaurus-utils/src/pathUtils.ts
+++ b/packages/docusaurus-utils/src/pathUtils.ts
@@ -5,10 +5,9 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-// Based on https://github.com/gatsbyjs/gatsby/pull/21518/files
-
 import path from 'path';
 
+// Based on https://github.com/gatsbyjs/gatsby/pull/21518/files
 // MacOS (APFS) and Windows (NTFS) filename length limit = 255 chars,
 // Others = 255 bytes
 const MAX_PATH_SEGMENT_CHARS = 255;
@@ -113,3 +112,10 @@ export function escapePath(str: string): string {
   // Remove the " around the json string;
   return escaped.substring(1, escaped.length - 1);
 }
+
+export function addTrailingPathSeparator(str: string): string {
+  return str.endsWith(path.sep)
+    ? str
+    : // If this is Windows, we need to change the forward slash to backward
+      `${str.replace(/\/$/, '')}${path.sep}`;
+}
diff --git a/packages/docusaurus-utils/src/urlUtils.ts b/packages/docusaurus-utils/src/urlUtils.ts
index 36417158fe..9a69596651 100644
--- a/packages/docusaurus-utils/src/urlUtils.ts
+++ b/packages/docusaurus-utils/src/urlUtils.ts
@@ -5,6 +5,9 @@
  * LICENSE file in the root directory of this source tree.
  */
 
+import {removeSuffix} from './jsUtils';
+import resolvePathnameUnsafe from 'resolve-pathname';
+
 export function normalizeUrl(rawUrls: string[]): string {
   const urls = [...rawUrls];
   const resultArray = [];
@@ -94,3 +97,53 @@ export function getEditUrl(
     normalizeUrl([editUrl, fileRelativePath.replace(/\\/g, '/')])
     : undefined;
 }
+
+/**
+ * Convert filepath to url path.
+ * Example: 'index.md' -> '/', 'foo/bar.js' -> '/foo/bar',
+ */
+export function fileToPath(file: string): string {
+  const indexRE = /(?<dirname>^|.*\/)index\.(?:mdx?|jsx?|tsx?)$/i;
+  const extRE = /\.(?:mdx?|jsx?|tsx?)$/;
+
+  if (indexRE.test(file)) {
+    return file.replace(indexRE, '/$1');
+  }
+  return `/${file.replace(extRE, '').replace(/\\/g, '/')}`;
+}
+
+export function encodePath(userPath: string): string {
+  return userPath
+    .split('/')
+    .map((item) => encodeURIComponent(item))
+    .join('/');
+}
+
+export function isValidPathname(str: string): boolean {
+  if (!str.startsWith('/')) {
+    return false;
+  }
+  try {
+    // weird, but is there a better way?
+    const parsedPathname = new URL(str, 'https://domain.com').pathname;
+    return parsedPathname === str || parsedPathname === encodeURI(str);
+  } catch {
+    return false;
+  }
+}
+
+// Resolve pathname and fail fast if resolution fails
+export function resolvePathname(to: string, from?: string): string {
+  return resolvePathnameUnsafe(to, from);
+}
+export function addLeadingSlash(str: string): string {
+  return str.startsWith('/') ? str : `/${str}`;
+}
+
+// TODO deduplicate: also present in @docusaurus/utils-common
+export function addTrailingSlash(str: string): string {
+  return str.endsWith('/') ? str : `${str}/`;
+}
+export function removeTrailingSlash(str: string): string {
+  return removeSuffix(str, '/');
+}
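
Reviewer note (not part of the patch): a minimal sketch of how the split-out helpers behave once re-exported through the updated src/index.ts barrel, with expected values taken from the tests in this diff. The consuming script, the '/site' path, and the console.log driver are illustrative only; the '@docusaurus/utils' entry point is the package name of packages/docusaurus-utils.

// Illustrative usage of helpers re-exported by the updated src/index.ts.
import {
  genChunkName,
  getPluginI18nPath,
  mergeTranslations,
  addTrailingSlash,
} from '@docusaurus/utils';

// emitUtils: chunk names are deterministic per path (value per emitUtils.test.ts,
// with the default non-production long-name behavior).
console.log(genChunkName('/docs/adding-blog')); // 'docs-adding-blog-062'

// i18nUtils: namespaced by locale first, then "<pluginName>-<pluginId>".
console.log(
  getPluginI18nPath({
    siteDir: '/site',
    locale: 'zh-Hans',
    pluginName: 'plugin-content-docs',
    pluginId: 'community',
    subPaths: ['foo'],
  }),
); // '/site/i18n/zh-Hans/plugin-content-docs-community/foo'

// i18nUtils: later files win on key conflicts when merging translations.
console.log(
  mergeTranslations([
    {T2: {message: 'T2 message', description: 'T2 desc'}},
    {T2: {message: 'T2 message 2', description: 'T2 desc 2'}},
  ]),
); // {T2: {message: 'T2 message 2', description: 'T2 desc 2'}}

// urlUtils: trailing-slash helper is a no-op when the slash is already there.
console.log(addTrailingSlash('/abcd')); // '/abcd/'
console.log(addTrailingSlash('/abcd/')); // '/abcd/'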