chore(v2): normalize url properly (#1105)
* refactor(v2): normalize url properly
* nits

parent 34dcc0c22e
commit b84754dde8

6 changed files with 128 additions and 11 deletions
@@ -8,7 +8,7 @@
 const globby = require('globby');
 const path = require('path');
 const fs = require('fs-extra');
-const {parse, idx} = require('./utils');
+const {parse, idx, normalizeUrl} = require('./utils');
 
 function fileToUrl(fileName) {
   return fileName
@@ -47,7 +47,7 @@ async function loadBlog({blogDir, env, siteConfig}) {
       const fileString = await fs.readFile(source, 'utf-8');
       const {metadata: rawMetadata} = parse(fileString);
       const metadata = {
-        permalink: path.join(baseUrl, `blog`, fileToUrl(blogFileName)),
+        permalink: normalizeUrl([baseUrl, `blog`, fileToUrl(blogFileName)]),
         source,
         ...rawMetadata,
         date,
@@ -67,7 +67,10 @@ async function loadBlog({blogDir, env, siteConfig}) {
   /* eslint-disable */
   for (let page = 0; page < numberOfPage; page++) {
     blogMetadatas.push({
-      permalink: path.join(basePageUrl, `${page > 0 ? `page${page + 1}` : ''}`),
+      permalink: normalizeUrl([
+        basePageUrl,
+        `${page > 0 ? `page${page + 1}` : ''}`,
+      ]),
       language: defaultLangTag,
       isBlogPage: true,
       posts: blogMetadatas.slice(page * perPage, (page + 1) * perPage),
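
Note: path.join is built for filesystem paths, not URLs, which is what this change fixes. A rough sketch of the difference (the host name is illustrative, and normalizeUrl is assumed to be in scope via require('./utils')):

    const path = require('path');

    // path.join normalizes its result as a filesystem path: the '//' after
    // the protocol is collapsed, and on Windows the separator becomes '\':
    path.join('https://docusaurus.io', 'blog'); // => 'https:/docusaurus.io/blog'

    // normalizeUrl treats its arguments as URL segments instead:
    normalizeUrl(['https://docusaurus.io', 'blog']); // => 'https://docusaurus.io/blog'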
@@ -7,7 +7,7 @@
 
 const fs = require('fs-extra');
 const path = require('path');
-const {getSubFolder, idx, parse} = require('../utils');
+const {getSubFolder, idx, parse, normalizeUrl} = require('../utils');
 
 function getLanguage(filepath, refDir, env) {
   const translationEnabled = idx(env, ['translation', 'enabled']);
@@ -135,9 +135,13 @@ module.exports = async function processMetadata(
         .replace(/:id/, metadata.id),
     );
   } else {
-    metadata.permalink = `${baseUrl}${docsUrl}/${langPart}${versionPart}${
-      metadata.id
-    }`;
+    metadata.permalink = normalizeUrl([
+      baseUrl,
+      docsUrl,
+      langPart,
+      versionPart,
+      metadata.id,
+    ]);
   }
 
   /* if version */
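
Note: the old template literal concatenated the parts blindly, so an empty segment could leave a double slash in the permalink. normalizeUrl drops empty components before joining. A sketch with illustrative values (docsUrl empty, no language or version part):

    // Old string interpolation:
    // '/' + '' + '/' + '' + '' + 'doc1'      => '//doc1'

    // New behavior – empty components are skipped:
    normalizeUrl(['/', '', '', '', 'doc1']); // => '/doc1'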
@@ -5,6 +5,8 @@
  * LICENSE file in the root directory of this source tree.
  */
 
+const {normalizeUrl} = require('./utils');
+
 async function genRoutesConfig({
   siteConfig = {},
   docsMetadatas = {},
@@ -33,7 +35,7 @@ async function genRoutesConfig({
   }`;
   }
 
-  const rootDocsUrl = baseUrl + docsUrl;
+  const rootDocsUrl = normalizeUrl([baseUrl, docsUrl]);
   const docsRoutes = `
   {
     path: '${rootDocsUrl}',
@@ -133,8 +135,9 @@ async function genRoutesConfig({
     `import BlogPage from '@theme/BlogPage';\n` +
     `import Pages from '@theme/Pages';\n` +
     `import NotFound from '@theme/NotFound';\n` +
-    `const routes = [${docsRoutes},
+    `const routes = [
     ${pagesMetadatas.map(genPagesRoute).join(',')},
+    ${docsRoutes},
     ${blogMetadatas.map(genBlogRoute).join(',')},
     ${notFoundRoute}\n];\n` +
     `export default routes;\n`
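
Note: two things change here. First, rootDocsUrl is normalized so that a slash contributed by both baseUrl and docsUrl cannot double up (the values below are illustrative):

    '/' + '/docs';                // => '//docs'
    normalizeUrl(['/', '/docs']); // => '/docs'

Second, the pages routes move ahead of docsRoutes in the generated array; since the routes are emitted (and presumably matched) in declaration order, page routes now take precedence over the docs routes.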
@@ -77,6 +77,67 @@ function parse(fileString) {
   return {metadata, content};
 }
 
+function normalizeUrl(rawUrls) {
+  const urls = rawUrls;
+  const resultArray = [];
+
+  // If the first part is a plain protocol, we combine it with the next part.
+  if (urls[0].match(/^[^/:]+:\/*$/) && urls.length > 1) {
+    const first = urls.shift();
+    urls[0] = first + urls[0];
+  }
+
+  // There must be two or three slashes in the file protocol, two slashes in anything else.
+  if (urls[0].match(/^file:\/\/\//)) {
+    urls[0] = urls[0].replace(/^([^/:]+):\/*/, '$1:///');
+  } else {
+    urls[0] = urls[0].replace(/^([^/:]+):\/*/, '$1://');
+  }
+
+  // eslint-disable-next-line
+  for (let i = 0; i < urls.length; i++) {
+    let component = urls[i];
+
+    if (typeof component !== 'string') {
+      throw new TypeError(`Url must be a string. Received ${component}`);
+    }
+
+    if (component === '') {
+      // eslint-disable-next-line
+      continue;
+    }
+
+    if (i > 0) {
+      // Removing the starting slashes for each component but the first.
+      component = component.replace(/^[/]+/, '');
+    }
+    if (i < urls.length - 1) {
+      // Removing the ending slashes for each component but the last.
+      component = component.replace(/[/]+$/, '');
+    } else {
+      // For the last component we will combine multiple slashes to a single one.
+      component = component.replace(/[/]+$/, '/');
+    }
+
+    resultArray.push(component);
+  }
+
+  let str = resultArray.join('/');
+  // Each input component is now separated by a single slash except the possible first plain protocol part.
+
+  // Remove trailing slash before parameters or hash.
+  str = str.replace(/\/(\?|&|#[^!])/g, '$1');
+
+  // Replace ? in parameters with &.
+  const parts = str.split('?');
+  str = parts.shift() + (parts.length > 0 ? '?' : '') + parts.join('&');
+
+  // Dedupe leading forward slashes.
+  str = str.replace(/^\/+/, '/');
+
+  return str;
+}
+
 module.exports = {
   encodePath,
   generate,
@@ -84,5 +145,6 @@ module.exports = {
   fileToComponentName,
   getSubFolder,
   idx,
+  normalizeUrl,
   parse,
 };
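
Note: to make the branches above concrete, here is a trace of one of the protocol-handling test cases through the function's four stages (the input comes from the test file below):

    // normalizeUrl(['http:', 'www.google.com///', 'foo/bar', '?test=123'])
    //
    // 1. bare protocol merged with the next part -> 'http:www.google.com///'
    // 2. slashes after the protocol fixed up     -> 'http://www.google.com///'
    // 3. components trimmed and joined with '/'  -> 'http://www.google.com/foo/bar/?test=123'
    // 4. slash before '?' removed                -> 'http://www.google.com/foo/bar?test=123'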
@@ -11,6 +11,7 @@ import {
   fileToComponentName,
   idx,
   getSubFolder,
+  normalizeUrl,
 } from '@lib/load/utils';
 
 describe('load utils', () => {
@@ -104,4 +105,48 @@ describe('load utils', () => {
     expect(getSubFolder(testD, 'docs')).toBe('ro');
     expect(getSubFolder(testE, 'docs')).toBeNull();
   });
+
+  test('normalizeUrl', () => {
+    const asserts = [
+      {
+        input: ['/', '/'],
+        output: '/',
+      },
+      {
+        input: ['/', 'docs'],
+        output: '/docs',
+      },
+      {
+        input: ['/', 'docs', 'en', 'next', 'blog'],
+        output: '/docs/en/next/blog',
+      },
+      {
+        input: ['/test/', '/docs', 'ro', 'doc1'],
+        output: '/test/docs/ro/doc1',
+      },
+      {
+        input: ['', '/', 'ko', 'hello'],
+        output: '/ko/hello',
+      },
+      {
+        input: ['hello', 'world'],
+        output: 'hello/world',
+      },
+      {
+        input: ['http://www.google.com/', 'foo/bar', '?test=123'],
+        output: 'http://www.google.com/foo/bar?test=123',
+      },
+      {
+        input: ['http:', 'www.google.com///', 'foo/bar', '?test=123'],
+        output: 'http://www.google.com/foo/bar?test=123',
+      },
+      {
+        input: ['http://foobar.com', '', 'test'],
+        output: 'http://foobar.com/test',
+      },
+    ];
+    asserts.forEach(testCase => {
+      expect(normalizeUrl(testCase.input)).toBe(testCase.output);
+    });
+  });
 });
@@ -6,8 +6,8 @@
  */
 
 module.exports = {
-  title: 'docusaurus',
-  tagline: '📝⚡️ Transform your document (문서) to a website',
+  title: 'Docusaurus',
+  tagline: '⚡️ Painless static site generator',
   organizationName: 'facebook',
   projectName: 'docusaurus',
   baseUrl: '/',