Mirror of https://github.com/facebook/docusaurus.git, synced 2025-05-30 17:37:09 +02:00
chore: move to monorepo (#1297)
* chore: move to monorepo
* lint all js file
* simplify circleCI
* fix failing tests
* fix tests due to folder rename
* fix test since v1 website is renamed
Parent: 6b1d2e8c9c
Commit: 1f91d19a8c
619 changed files with 12713 additions and 26817 deletions
12
packages/docusaurus-1.x/lib/server/__mocks__/tiny-lr.js
Normal file
@@ -0,0 +1,12 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const tinylrServer = {
  listen: jest.fn(),
};

module.exports = () => tinylrServer;
@@ -0,0 +1,11 @@
---
title: Docusaurus
author: Endilie
authorURL: https://github.com/endiliey
authorFBID: 100000251103620
authorTwitter: endiliey
---

![Docusaurus](/img/slash-introducing.svg)

We are very happy to introduce [Docusaurus](https://github.com/facebook/Docusaurus) to help you manage one or many open source websites.
@@ -0,0 +1,21 @@
---
id: doc1
title: Document 1
---

Docusaurus is the best :)

![doc1](/img/doc1.png)

```js
console.log("Docusaurus");
```

![doc1](assets/doc1.png)
![doc1 2x](assets/doc1@2x.png)

Don't replace the one below
```md

![docusaurus](assets/docusaurus.svg)
```
@@ -0,0 +1,29 @@
---
id: doc2
title: Document 2
---

### Existing Docs

- [doc1](doc1.md)
- [doc2](./doc2.md)

### Non-existing Docs

- [hahaha](hahaha.md)

## Repeating Docs

- [doc1](doc1.md)
- [doc2](./doc2.md)

## Do not replace this
```md
![image1](assets/image1.png)
```

```js
const doc1 = foo();
console.log("[image2](assets/image2.jpg)");
const testStr = ``;
```
@ -0,0 +1,72 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
'en-doc1': {
|
||||
id: 'en-doc1',
|
||||
title: 'Document 1',
|
||||
source: 'doc1.md',
|
||||
version: 'next',
|
||||
permalink: 'docs/en/next/doc1.html',
|
||||
localized_id: 'doc1',
|
||||
language: 'en',
|
||||
sidebar: 'docs',
|
||||
category: 'Test',
|
||||
next_id: 'doc2',
|
||||
next: 'en-doc2',
|
||||
next_title: 'Document 2',
|
||||
subcategory: 'Sub Cat 1',
|
||||
sort: 1,
|
||||
},
|
||||
'en-doc2': {
|
||||
id: 'en-doc2',
|
||||
title: 'Document 2',
|
||||
source: 'doc2.md',
|
||||
version: 'next',
|
||||
permalink: 'docs/en/next/doc2.html',
|
||||
localized_id: 'doc2',
|
||||
language: 'en',
|
||||
sidebar: 'docs',
|
||||
category: 'Test',
|
||||
previous_id: 'doc1',
|
||||
previous: 'en-doc1',
|
||||
previous_title: 'Document 1',
|
||||
subcategory: 'Sub Cat 1',
|
||||
sort: 2,
|
||||
},
|
||||
'en-doc3': {
|
||||
id: 'en-doc3',
|
||||
title: 'Document 3',
|
||||
source: 'doc3.md',
|
||||
version: 'next',
|
||||
permalink: 'docs/en/next/doc3.html',
|
||||
localized_id: 'doc3',
|
||||
language: 'en',
|
||||
sidebar: 'docs',
|
||||
category: 'Test',
|
||||
previous_id: 'doc2',
|
||||
previous: 'en-doc2',
|
||||
previous_title: 'Document 2',
|
||||
subcategory: 'Sub Cat 2',
|
||||
sort: 3,
|
||||
},
|
||||
'en-doc4': {
|
||||
id: 'en-doc4',
|
||||
title: 'Document 4',
|
||||
source: 'doc4.md',
|
||||
version: 'next',
|
||||
permalink: 'docs/en/next/doc4.html',
|
||||
localized_id: 'doc4',
|
||||
language: 'en',
|
||||
sidebar: 'docs',
|
||||
category: 'Test 2',
|
||||
previous_id: 'doc3',
|
||||
previous: 'en-doc3',
|
||||
previous_title: 'Document 3',
|
||||
sort: 4,
|
||||
},
|
||||
};
|
|
@ -0,0 +1,155 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
'en-doc1': {
|
||||
id: 'en-doc1',
|
||||
title: 'Document 1',
|
||||
source: 'doc1.md',
|
||||
version: 'next',
|
||||
permalink: 'docs/en/next/doc1.html',
|
||||
localized_id: 'doc1',
|
||||
language: 'en',
|
||||
sidebar: 'docs',
|
||||
category: 'Test',
|
||||
next_id: 'doc2',
|
||||
next: 'en-doc2',
|
||||
next_title: 'Document 2',
|
||||
sort: 1,
|
||||
},
|
||||
'en-doc2': {
|
||||
id: 'en-doc2',
|
||||
title: 'Document 2',
|
||||
source: 'doc2.md',
|
||||
version: 'next',
|
||||
permalink: 'docs/en/next/doc2.html',
|
||||
localized_id: 'doc2',
|
||||
language: 'en',
|
||||
sidebar: 'docs',
|
||||
category: 'Test',
|
||||
previous_id: 'doc1',
|
||||
previous: 'en-doc1',
|
||||
previous_title: 'Document 1',
|
||||
sort: 2,
|
||||
},
|
||||
'en-doc3': {
|
||||
id: 'en-doc3',
|
||||
title: 'Document 3',
|
||||
source: 'doc3.md',
|
||||
version: 'next',
|
||||
permalink: 'docs/en/next/doc3.html',
|
||||
localized_id: 'doc3',
|
||||
language: 'en',
|
||||
sidebar: 'docs',
|
||||
category: 'Test 2',
|
||||
previous_id: 'doc2',
|
||||
previous: 'en-doc2',
|
||||
previous_title: 'Document 2',
|
||||
sort: 3,
|
||||
},
|
||||
'en-reflinks': {
|
||||
id: 'en-reflinks',
|
||||
title: 'Reference Links',
|
||||
source: 'reflinks.md',
|
||||
version: 'next',
|
||||
permalink: 'docs/en/next/reflinks.html',
|
||||
localized_id: 'reflinks',
|
||||
language: 'en',
|
||||
sidebar: 'docs',
|
||||
category: 'Test 2',
|
||||
previous_id: 'doc3',
|
||||
previous: 'en-doc3',
|
||||
previous_title: 'Document 3',
|
||||
sort: 4,
|
||||
},
|
||||
'ko-doc1': {
|
||||
id: 'ko-doc1',
|
||||
title: '문서 1',
|
||||
source: 'doc1.md',
|
||||
version: 'next',
|
||||
permalink: 'docs/ko/next/doc1.html',
|
||||
localized_id: 'doc1',
|
||||
language: 'ko',
|
||||
sidebar: 'docs',
|
||||
category: 'Test',
|
||||
next_id: 'doc2',
|
||||
next: 'ko-doc2',
|
||||
next_title: '문서 2',
|
||||
},
|
||||
'ko-doc2': {
|
||||
id: 'ko-doc2',
|
||||
title: '문서 2',
|
||||
source: 'doc2.md',
|
||||
version: 'next',
|
||||
permalink: 'docs/ko/next/doc2.html',
|
||||
localized_id: 'doc2',
|
||||
language: 'ko',
|
||||
sidebar: 'docs',
|
||||
category: 'Test',
|
||||
previous_id: 'doc1',
|
||||
previous: 'ko-doc1',
|
||||
previous_title: '문서 1',
|
||||
},
|
||||
'en-version-1.0.0-doc1': {
|
||||
id: 'en-version-1.0.0-doc1',
|
||||
original_id: 'doc1',
|
||||
title: 'Document 1',
|
||||
source: 'version-1.0.0/doc1.md',
|
||||
version: '1.0.0',
|
||||
permalink: 'docs/en/doc1.html',
|
||||
localized_id: 'version-1.0.0-doc1',
|
||||
language: 'en',
|
||||
sidebar: 'version-1.0.0-docs',
|
||||
category: 'Test',
|
||||
next_id: 'doc2',
|
||||
next: 'en-version-1.0.0-doc2',
|
||||
next_title: 'Document 2',
|
||||
},
|
||||
'en-version-1.0.0-doc2': {
|
||||
id: 'en-version-1.0.0-doc2',
|
||||
original_id: 'doc2',
|
||||
title: 'Document 2',
|
||||
source: 'version-1.0.0/doc2.md',
|
||||
version: '1.0.0',
|
||||
permalink: 'docs/en/doc2.html',
|
||||
localized_id: 'version-1.0.0-doc2',
|
||||
language: 'en',
|
||||
sidebar: 'version-1.0.0-docs',
|
||||
category: 'Test',
|
||||
previous_id: 'doc1',
|
||||
previous: 'en-version-1.0.0-doc1',
|
||||
previous_title: 'Document 1',
|
||||
},
|
||||
'ko-version-1.0.0-doc1': {
|
||||
id: 'ko-version-1.0.0-doc1',
|
||||
title: '문서 1',
|
||||
source: 'version-1.0.0/doc1.md',
|
||||
version: '1.0.0',
|
||||
permalink: 'docs/ko/doc1.html',
|
||||
localized_id: 'version-1.0.0-doc1',
|
||||
language: 'ko',
|
||||
sidebar: 'version-1.0.0-docs',
|
||||
category: 'Test',
|
||||
next_id: 'doc2',
|
||||
next: 'ko-version-1.0.0-doc2',
|
||||
next_title: '문서 2',
|
||||
},
|
||||
'ko-version-1.0.0-doc2': {
|
||||
id: 'ko-version-1.0.0-doc2',
|
||||
title: '문서 2',
|
||||
source: 'version-1.0.0/doc2.md',
|
||||
version: '1.0.0',
|
||||
permalink: 'docs/ko/doc2.html',
|
||||
localized_id: 'version-1.0.0-doc2',
|
||||
language: 'ko',
|
||||
sidebar: 'version-1.0.0-docs',
|
||||
category: 'Test',
|
||||
previous_id: 'doc1',
|
||||
previous: 'ko-version-1.0.0-doc1',
|
||||
previous_title: '문서 1',
|
||||
},
|
||||
};
|
|
@@ -0,0 +1,36 @@
---
id: reflinks
title: Reference Links
---

### Existing Docs

- [doc1][doc1]
- [doc2][doc2]

### Non-existing Docs

- [hahaha][hahaha]

## Repeating Docs

- [doc1][doc1]
- [doc2][doc2]

## Do not replace this
```md
![image1][image1]
```

```js
const doc1 = foo();
console.log("[image2][image2]");
const testStr = `![image3][image3]`;
```

[doc1]: doc1.md
[doc2]: ./doc2.md
[hahaha]: hahaha.md
[image1]: assets/image1.png
[image2]: assets/image2.jpg
[image3]: assets/image3.gif
@@ -0,0 +1,33 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

module.exports = {
  docs: {
    'First Category': ['doc1', 'doc2'],
    'Second Category': [
      'doc3',
      {
        type: 'subcategory',
        label: 'First Subcategory',
        ids: ['doc4'],
      },
      'doc5',
    ],
    'Third Category': [
      {
        type: 'subcategory',
        label: 'Second Subcategory',
        ids: ['doc6', 'doc7'],
      },
      {
        type: 'subcategory',
        label: 'Third Subcategory',
        ids: ['doc8'],
      },
    ],
  },
};
@@ -0,0 +1,13 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

module.exports = {
  docs: {
    'First Category': ['doc1', 'doc2'],
    'Second Category': ['doc4', 'doc3'],
  },
};
@@ -0,0 +1,9 @@
---
id: doc3
title: Document 3
---

Test subdirectory file

### Replace this
- [doc3](subdir/doc3.md)
@@ -0,0 +1,25 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

.hljs {
  margin-left: -15px;
  margin-right: -15px;
  border: 1px solid #eee;
  border-radius: 6px;
  padding: 15px;
  font-size: 15px;
  max-width: 50rem;
}
.hljs.javascript {
  background-color: rgba(247, 223, 30, 0.03);
}
.hljs .comment {
  opacity: 0.7;
}
::placeholder {
  color: gray;
}
@@ -0,0 +1,26 @@
---
title: This is not a css
---

This is a markdown, not a css

.homeWrapperInner .homeCodeSnippet > div:nth-child(1) {
  display: none;
}

@media (max-width: 480px) {
  .projectTitle {
    font-size: 30px;
  }

  .homeCodeSnippet .hljs {
    font-size: 13px;
    padding: 0;
  }
  .homeWrapperInner .homeCodeSnippet > div:nth-child(1) {
    display: block;
  }
  .homeWrapperInner .homeCodeSnippet > div:nth-child(2) {
    display: none;
  }
}
@ -0,0 +1,65 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`getMetadata blog file 1`] = `
|
||||
Object {
|
||||
"author": "Endilie",
|
||||
"authorFBID": 100000251103620,
|
||||
"authorTwitter": "endiliey",
|
||||
"authorURL": "https://github.com/endiliey",
|
||||
"content": "
|
||||

|
||||
|
||||
We are very happy to introduce [Docusaurus](https://github.com/facebook/Docusaurus) to help you manage one or many open source websites.",
|
||||
"id": "Docusaurus",
|
||||
"path": "2018/08/17/docusaurus.html",
|
||||
"title": "Docusaurus",
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`replaceAssetsLink does not transform document without valid assets link 1`] = `
|
||||
"
|
||||
### Existing Docs
|
||||
|
||||
- [doc1](doc1.md)
|
||||
- [doc2](./doc2.md)
|
||||
|
||||
### Non-existing Docs
|
||||
|
||||
- [hahaha](hahaha.md)
|
||||
|
||||
## Repeating Docs
|
||||
|
||||
- [doc1](doc1.md)
|
||||
- [doc2](./doc2.md)
|
||||
|
||||
## Do not replace this
|
||||
\`\`\`md
|
||||

|
||||
\`\`\`
|
||||
|
||||
\`\`\`js
|
||||
const doc1 = foo();
|
||||
console.log(\\"[image2](assets/image2.jpg)\\");
|
||||
const testStr = \`\`;
|
||||
\`\`\`"
|
||||
`;
|
||||
|
||||
exports[`replaceAssetsLink transform document with valid assets link 1`] = `
|
||||
"
|
||||
Docusaurus is the best :)
|
||||
|
||||

|
||||
|
||||
\`\`\`js
|
||||
console.log(\\"Docusaurus\\");
|
||||
\`\`\`
|
||||
|
||||

|
||||

|
||||
|
||||
Don't replace the one below
|
||||
\`\`\`md
|
||||
|
||||

|
||||
\`\`\`"
|
||||
`;
|
|
@ -0,0 +1,142 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`mdToHtmlify transform link even in subdirectory 1`] = `
|
||||
"
|
||||
Test subdirectory file
|
||||
|
||||
### Replace this
|
||||
- [doc3](/docs/subdir/doc3)
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`mdToHtmlify transform nothing 1`] = `
|
||||
"
|
||||
Docusaurus is the best :)
|
||||
|
||||

|
||||
|
||||
\`\`\`js
|
||||
console.log(\\"Docusaurus\\");
|
||||
\`\`\`
|
||||
|
||||

|
||||

|
||||
|
||||
Don't replace the one below
|
||||
\`\`\`md
|
||||
|
||||

|
||||
\`\`\`"
|
||||
`;
|
||||
|
||||
exports[`mdToHtmlify transform to correct link 1`] = `
|
||||
"
|
||||
### Existing Docs
|
||||
|
||||
- [doc1](/docs/en/next/doc1)
|
||||
- [doc2](/docs/en/next/doc2)
|
||||
|
||||
### Non-existing Docs
|
||||
|
||||
- [hahaha](hahaha.md)
|
||||
|
||||
## Repeating Docs
|
||||
|
||||
- [doc1](/docs/en/next/doc1)
|
||||
- [doc2](/docs/en/next/doc2)
|
||||
|
||||
## Do not replace this
|
||||
\`\`\`md
|
||||

|
||||
\`\`\`
|
||||
|
||||
\`\`\`js
|
||||
const doc1 = foo();
|
||||
console.log(\\"[image2](assets/image2.jpg)\\");
|
||||
const testStr = \`\`;
|
||||
\`\`\`"
|
||||
`;
|
||||
|
||||
exports[`mdToHtmlify transforms reference links 1`] = `
|
||||
"
|
||||
### Existing Docs
|
||||
|
||||
- [doc1][doc1]
|
||||
- [doc2][doc2]
|
||||
|
||||
### Non-existing Docs
|
||||
|
||||
- [hahaha][hahaha]
|
||||
|
||||
## Repeating Docs
|
||||
|
||||
- [doc1][doc1]
|
||||
- [doc2][doc2]
|
||||
|
||||
## Do not replace this
|
||||
\`\`\`md
|
||||
![image1][image1]
|
||||
\`\`\`
|
||||
|
||||
\`\`\`js
|
||||
const doc1 = foo();
|
||||
console.log(\\"[image2][image2]\\");
|
||||
const testStr = \`![image3][image3]\`;
|
||||
\`\`\`
|
||||
|
||||
[doc1]: /docs/en/next/doc1
|
||||
[doc2]: /docs/en/next/doc2
|
||||
[hahaha]: hahaha.md
|
||||
[image1]: assets/image1.png
|
||||
[image2]: assets/image2.jpg
|
||||
[image3]: assets/image3.gif
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`replaceAssetsLink does not transform document without valid assets link 1`] = `
|
||||
"
|
||||
### Existing Docs
|
||||
|
||||
- [doc1](doc1.md)
|
||||
- [doc2](./doc2.md)
|
||||
|
||||
### Non-existing Docs
|
||||
|
||||
- [hahaha](hahaha.md)
|
||||
|
||||
## Repeating Docs
|
||||
|
||||
- [doc1](doc1.md)
|
||||
- [doc2](./doc2.md)
|
||||
|
||||
## Do not replace this
|
||||
\`\`\`md
|
||||

|
||||
\`\`\`
|
||||
|
||||
\`\`\`js
|
||||
const doc1 = foo();
|
||||
console.log(\\"[image2](assets/image2.jpg)\\");
|
||||
const testStr = \`\`;
|
||||
\`\`\`"
|
||||
`;
|
||||
|
||||
exports[`replaceAssetsLink transform document with valid assets link 1`] = `
|
||||
"
|
||||
Docusaurus is the best :)
|
||||
|
||||

|
||||
|
||||
\`\`\`js
|
||||
console.log(\\"Docusaurus\\");
|
||||
\`\`\`
|
||||
|
||||

|
||||

|
||||
|
||||
Don't replace the one below
|
||||
\`\`\`md
|
||||
|
||||

|
||||
\`\`\`"
|
||||
`;
|
|
@ -0,0 +1,194 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`readCategories should return proper categories and their pages 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"children": Array [
|
||||
Object {
|
||||
"item": Object {
|
||||
"category": "Test",
|
||||
"id": "en-doc1",
|
||||
"language": "en",
|
||||
"localized_id": "doc1",
|
||||
"next": "en-doc2",
|
||||
"next_id": "doc2",
|
||||
"next_title": "Document 2",
|
||||
"permalink": "docs/en/next/doc1.html",
|
||||
"sidebar": "docs",
|
||||
"sort": 1,
|
||||
"source": "doc1.md",
|
||||
"title": "Document 1",
|
||||
"version": "next",
|
||||
},
|
||||
"type": "LINK",
|
||||
},
|
||||
Object {
|
||||
"item": Object {
|
||||
"category": "Test",
|
||||
"id": "en-doc2",
|
||||
"language": "en",
|
||||
"localized_id": "doc2",
|
||||
"permalink": "docs/en/next/doc2.html",
|
||||
"previous": "en-doc1",
|
||||
"previous_id": "doc1",
|
||||
"previous_title": "Document 1",
|
||||
"sidebar": "docs",
|
||||
"sort": 2,
|
||||
"source": "doc2.md",
|
||||
"title": "Document 2",
|
||||
"version": "next",
|
||||
},
|
||||
"type": "LINK",
|
||||
},
|
||||
],
|
||||
"title": "Test",
|
||||
"type": "CATEGORY",
|
||||
},
|
||||
Object {
|
||||
"children": Array [
|
||||
Object {
|
||||
"item": Object {
|
||||
"category": "Test 2",
|
||||
"id": "en-doc3",
|
||||
"language": "en",
|
||||
"localized_id": "doc3",
|
||||
"permalink": "docs/en/next/doc3.html",
|
||||
"previous": "en-doc2",
|
||||
"previous_id": "doc2",
|
||||
"previous_title": "Document 2",
|
||||
"sidebar": "docs",
|
||||
"sort": 3,
|
||||
"source": "doc3.md",
|
||||
"title": "Document 3",
|
||||
"version": "next",
|
||||
},
|
||||
"type": "LINK",
|
||||
},
|
||||
Object {
|
||||
"item": Object {
|
||||
"category": "Test 2",
|
||||
"id": "en-reflinks",
|
||||
"language": "en",
|
||||
"localized_id": "reflinks",
|
||||
"permalink": "docs/en/next/reflinks.html",
|
||||
"previous": "en-doc3",
|
||||
"previous_id": "doc3",
|
||||
"previous_title": "Document 3",
|
||||
"sidebar": "docs",
|
||||
"sort": 4,
|
||||
"source": "reflinks.md",
|
||||
"title": "Reference Links",
|
||||
"version": "next",
|
||||
},
|
||||
"type": "LINK",
|
||||
},
|
||||
],
|
||||
"title": "Test 2",
|
||||
"type": "CATEGORY",
|
||||
},
|
||||
]
|
||||
`;
|
||||
|
||||
exports[`readCategories should return proper data with categories and sub categories 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"children": Array [
|
||||
Object {
|
||||
"children": Array [
|
||||
Object {
|
||||
"item": Object {
|
||||
"category": "Test",
|
||||
"id": "en-doc1",
|
||||
"language": "en",
|
||||
"localized_id": "doc1",
|
||||
"next": "en-doc2",
|
||||
"next_id": "doc2",
|
||||
"next_title": "Document 2",
|
||||
"permalink": "docs/en/next/doc1.html",
|
||||
"sidebar": "docs",
|
||||
"sort": 1,
|
||||
"source": "doc1.md",
|
||||
"subcategory": "Sub Cat 1",
|
||||
"title": "Document 1",
|
||||
"version": "next",
|
||||
},
|
||||
"type": "LINK",
|
||||
},
|
||||
Object {
|
||||
"item": Object {
|
||||
"category": "Test",
|
||||
"id": "en-doc2",
|
||||
"language": "en",
|
||||
"localized_id": "doc2",
|
||||
"permalink": "docs/en/next/doc2.html",
|
||||
"previous": "en-doc1",
|
||||
"previous_id": "doc1",
|
||||
"previous_title": "Document 1",
|
||||
"sidebar": "docs",
|
||||
"sort": 2,
|
||||
"source": "doc2.md",
|
||||
"subcategory": "Sub Cat 1",
|
||||
"title": "Document 2",
|
||||
"version": "next",
|
||||
},
|
||||
"type": "LINK",
|
||||
},
|
||||
],
|
||||
"title": "Sub Cat 1",
|
||||
"type": "SUBCATEGORY",
|
||||
},
|
||||
Object {
|
||||
"children": Array [
|
||||
Object {
|
||||
"item": Object {
|
||||
"category": "Test",
|
||||
"id": "en-doc3",
|
||||
"language": "en",
|
||||
"localized_id": "doc3",
|
||||
"permalink": "docs/en/next/doc3.html",
|
||||
"previous": "en-doc2",
|
||||
"previous_id": "doc2",
|
||||
"previous_title": "Document 2",
|
||||
"sidebar": "docs",
|
||||
"sort": 3,
|
||||
"source": "doc3.md",
|
||||
"subcategory": "Sub Cat 2",
|
||||
"title": "Document 3",
|
||||
"version": "next",
|
||||
},
|
||||
"type": "LINK",
|
||||
},
|
||||
],
|
||||
"title": "Sub Cat 2",
|
||||
"type": "SUBCATEGORY",
|
||||
},
|
||||
],
|
||||
"title": "Test",
|
||||
"type": "CATEGORY",
|
||||
},
|
||||
Object {
|
||||
"children": Array [
|
||||
Object {
|
||||
"item": Object {
|
||||
"category": "Test 2",
|
||||
"id": "en-doc4",
|
||||
"language": "en",
|
||||
"localized_id": "doc4",
|
||||
"permalink": "docs/en/next/doc4.html",
|
||||
"previous": "en-doc3",
|
||||
"previous_id": "doc3",
|
||||
"previous_title": "Document 3",
|
||||
"sidebar": "docs",
|
||||
"sort": 4,
|
||||
"source": "doc4.md",
|
||||
"title": "Document 4",
|
||||
"version": "next",
|
||||
},
|
||||
"type": "LINK",
|
||||
},
|
||||
],
|
||||
"title": "Test 2",
|
||||
"type": "CATEGORY",
|
||||
},
|
||||
]
|
||||
`;
|
|
@ -0,0 +1,70 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`readMetadata readSidebar should verify sub category data and verify order 1`] = `
|
||||
Object {
|
||||
"doc1": Object {
|
||||
"category": "First Category",
|
||||
"next": "doc2",
|
||||
"order": 1,
|
||||
"previous": null,
|
||||
"sidebar": "docs",
|
||||
"subcategory": null,
|
||||
},
|
||||
"doc2": Object {
|
||||
"category": "First Category",
|
||||
"next": "doc3",
|
||||
"order": 2,
|
||||
"previous": "doc1",
|
||||
"sidebar": "docs",
|
||||
"subcategory": null,
|
||||
},
|
||||
"doc3": Object {
|
||||
"category": "Second Category",
|
||||
"next": "doc4",
|
||||
"order": 3,
|
||||
"previous": "doc2",
|
||||
"sidebar": "docs",
|
||||
"subcategory": null,
|
||||
},
|
||||
"doc4": Object {
|
||||
"category": "Second Category",
|
||||
"next": "doc5",
|
||||
"order": 4,
|
||||
"previous": "doc3",
|
||||
"sidebar": "docs",
|
||||
"subcategory": "First Subcategory",
|
||||
},
|
||||
"doc5": Object {
|
||||
"category": "Second Category",
|
||||
"next": "doc6",
|
||||
"order": 5,
|
||||
"previous": "doc4",
|
||||
"sidebar": "docs",
|
||||
"subcategory": null,
|
||||
},
|
||||
"doc6": Object {
|
||||
"category": "Third Category",
|
||||
"next": "doc7",
|
||||
"order": 6,
|
||||
"previous": "doc5",
|
||||
"sidebar": "docs",
|
||||
"subcategory": "Second Subcategory",
|
||||
},
|
||||
"doc7": Object {
|
||||
"category": "Third Category",
|
||||
"next": "doc8",
|
||||
"order": 7,
|
||||
"previous": "doc6",
|
||||
"sidebar": "docs",
|
||||
"subcategory": "Second Subcategory",
|
||||
},
|
||||
"doc8": Object {
|
||||
"category": "Third Category",
|
||||
"next": null,
|
||||
"order": 8,
|
||||
"previous": "doc7",
|
||||
"sidebar": "docs",
|
||||
"subcategory": "Third Subcategory",
|
||||
},
|
||||
}
|
||||
`;
|
|
@ -0,0 +1,43 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`server utils autoprefix css 1`] = `
|
||||
"/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
.hljs {
|
||||
margin-left: -15px;
|
||||
margin-right: -15px;
|
||||
border: 1px solid #eee;
|
||||
border-radius: 6px;
|
||||
padding: 15px;
|
||||
font-size: 15px;
|
||||
max-width: 50rem;
|
||||
}
|
||||
.hljs.javascript {
|
||||
background-color: rgba(247, 223, 30, 0.03);
|
||||
}
|
||||
.hljs .comment {
|
||||
opacity: 0.7;
|
||||
}
|
||||
::-webkit-input-placeholder {
|
||||
color: gray;
|
||||
}
|
||||
:-ms-input-placeholder {
|
||||
color: gray;
|
||||
}
|
||||
::-ms-input-placeholder {
|
||||
color: gray;
|
||||
}
|
||||
::placeholder {
|
||||
color: gray;
|
||||
}
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`server utils minify css 1`] = `".hljs{margin-left:-15px;margin-right:-15px;border:1px solid #eee;border-radius:6px;padding:15px;font-size:15px;max-width:50rem}.hljs.javascript{background-color:rgba(247,223,30,.03)}.hljs .comment{opacity:.7}::placeholder{color:gray}"`;
|
||||
|
||||
exports[`server utils minify css 2`] = `[Error: Unexpected "space" found.]`;
|
109
packages/docusaurus-1.x/lib/server/__tests__/blog.test.js
Normal file
|
@ -0,0 +1,109 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
const path = require('path');
|
||||
const fs = require('fs-extra');
|
||||
const blog = require('../blog');
|
||||
const metadataUtils = require('../metadataUtils');
|
||||
const {replaceAssetsLink} = require('../utils.js');
|
||||
|
||||
jest.mock(`${process.cwd()}/siteConfig.js`, () => ({baseUrl: '/'}), {
|
||||
virtual: true,
|
||||
});
|
||||
|
||||
const testFile = path.join(
|
||||
__dirname,
|
||||
'__fixtures__',
|
||||
'2018-08-17-docusaurus.md',
|
||||
);
|
||||
|
||||
fs.existsSync = jest.fn().mockReturnValue(true);
|
||||
|
||||
describe('getMetadata', () => {
|
||||
test('file does not exist', () => {
|
||||
fs.existsSync.mockReturnValueOnce(null);
|
||||
expect(blog.getMetadata('/this/path/does-not-exist/')).toBeNull();
|
||||
});
|
||||
|
||||
test('null/undefined', () => {
|
||||
expect(blog.getMetadata(null)).toBeNull();
|
||||
expect(blog.getMetadata(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
test('blog file', () => {
|
||||
const metadata = blog.getMetadata(testFile);
|
||||
expect(metadata).toMatchSnapshot();
|
||||
expect(metadata).not.toBeNull();
|
||||
expect(metadata).toHaveProperty('id');
|
||||
expect(metadata).toHaveProperty('path');
|
||||
expect(metadata).toHaveProperty('content');
|
||||
});
|
||||
});
|
||||
|
||||
describe('fileToUrl', () => {
|
||||
test('invalid file path', () => {
|
||||
expect(blog.fileToUrl(null)).toBeNull();
|
||||
expect(blog.fileToUrl(undefined)).toBeNull();
|
||||
expect(blog.fileToUrl(true)).toBeNull();
|
||||
fs.existsSync.mockReturnValueOnce(null);
|
||||
expect(blog.fileToUrl('2018-03-02-this-does-not-exist.md')).toBeNull();
|
||||
});
|
||||
|
||||
test('valid filepath', () => {
|
||||
expect(blog.fileToUrl(testFile)).toEqual('2018/08/17/docusaurus.html');
|
||||
});
|
||||
});
|
||||
|
||||
describe('urlToSource', () => {
|
||||
test('invalid url path', () => {
|
||||
expect(blog.urlToSource(null)).toBeNull();
|
||||
expect(blog.urlToSource(undefined)).toBeNull();
|
||||
expect(blog.urlToSource(true)).toBeNull();
|
||||
});
|
||||
test('valid url path', () => {
|
||||
expect(blog.urlToSource(`${blog.fileToUrl(testFile)}`)).toEqual(
|
||||
'2018-08-17-docusaurus.md',
|
||||
);
|
||||
expect(blog.urlToSource('2018/03/04/test-name-lol.html')).toEqual(
|
||||
'2018-03-04-test-name-lol.md',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('replaceAssetsLink', () => {
|
||||
test('transform document with valid assets link', () => {
|
||||
const doc1 = fs.readFileSync(
|
||||
path.join(__dirname, '__fixtures__', 'doc1.md'),
|
||||
'utf8',
|
||||
);
|
||||
const rawContent1 = metadataUtils.extractMetadata(doc1).rawContent;
|
||||
const content1 = replaceAssetsLink(rawContent1, '/blog');
|
||||
expect(content1).toMatchSnapshot();
|
||||
expect(content1).toContain('');
|
||||
expect(content1).toContain('');
|
||||
expect(content1).toContain('');
|
||||
expect(content1).toContain('');
|
||||
expect(content1).not.toContain('');
|
||||
expect(content1).not.toContain('');
|
||||
expect(content1).not.toContain('');
|
||||
expect(content1).not.toContain('');
|
||||
expect(content1).not.toEqual(rawContent1);
|
||||
});
|
||||
|
||||
test('does not transform document without valid assets link', () => {
|
||||
const doc2 = fs.readFileSync(
|
||||
path.join(__dirname, '__fixtures__', 'doc2.md'),
|
||||
'utf8',
|
||||
);
|
||||
const rawContent2 = metadataUtils.extractMetadata(doc2).rawContent;
|
||||
const content2 = replaceAssetsLink(rawContent2, '/blog');
|
||||
expect(content2).toMatchSnapshot();
|
||||
expect(content2).not.toContain('');
|
||||
expect(content2).not.toContain('');
|
||||
expect(content2).not.toContain('');
|
||||
expect(content2).toEqual(rawContent2);
|
||||
});
|
||||
});
|
217
packages/docusaurus-1.x/lib/server/__tests__/docs.test.js
Normal file
|
@ -0,0 +1,217 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
// simulate cwd to website so all require (CWD+'/siteConfig.js') will work
|
||||
const originalCwd = process.cwd();
|
||||
if (!/website-1.x$/.test(originalCwd)) {
|
||||
process.chdir(process.cwd() + '/website-1.x');
|
||||
}
|
||||
|
||||
const path = require('path');
|
||||
const fs = require('fs-extra');
|
||||
const docs = require('../docs');
|
||||
const metadataUtils = require('../metadataUtils');
|
||||
const {replaceAssetsLink} = require('../utils.js');
|
||||
|
||||
jest.mock('../env', () => ({
|
||||
translation: {
|
||||
enabled: true,
|
||||
enabledLanguages: () => [
|
||||
{
|
||||
enabled: true,
|
||||
name: 'English',
|
||||
tag: 'en',
|
||||
},
|
||||
{
|
||||
enabled: true,
|
||||
name: '한국어',
|
||||
tag: 'ko',
|
||||
},
|
||||
],
|
||||
},
|
||||
versioning: {
|
||||
enabled: true,
|
||||
defaultVersion: '1.0.0',
|
||||
latestVersion: '1.0.0',
|
||||
},
|
||||
}));
|
||||
|
||||
const Metadata = require(path.join(__dirname, '__fixtures__', 'metadata.js'));
|
||||
|
||||
const doc1 = fs.readFileSync(
|
||||
path.join(__dirname, '__fixtures__', 'doc1.md'),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
const doc2 = fs.readFileSync(
|
||||
path.join(__dirname, '__fixtures__', 'doc2.md'),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
const doc3 = fs.readFileSync(
|
||||
path.join(__dirname, '__fixtures__', 'subdir', 'doc3.md'),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
const refLinks = fs.readFileSync(
|
||||
path.join(__dirname, '__fixtures__', 'reflinks.md'),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
const rawContent1 = metadataUtils.extractMetadata(doc1).rawContent;
|
||||
const rawContent2 = metadataUtils.extractMetadata(doc2).rawContent;
|
||||
const rawContent3 = metadataUtils.extractMetadata(doc3).rawContent;
|
||||
const rawContentRefLinks = metadataUtils.extractMetadata(refLinks).rawContent;
|
||||
|
||||
describe('mdToHtmlify', () => {
|
||||
const siteConfig = {
|
||||
baseUrl: '/',
|
||||
docsUrl: 'docs',
|
||||
};
|
||||
const mdToHtml = metadataUtils.mdToHtml(Metadata, siteConfig);
|
||||
|
||||
test('transform nothing', () => {
|
||||
const content1 = docs.mdToHtmlify(
|
||||
rawContent1,
|
||||
mdToHtml,
|
||||
Metadata['en-doc1'],
|
||||
);
|
||||
expect(content1).not.toContain('/docs/en/next/');
|
||||
expect(content1).toMatchSnapshot();
|
||||
expect(content1).toEqual(rawContent1);
|
||||
});
|
||||
|
||||
test('transform to correct link', () => {
|
||||
const content2 = docs.mdToHtmlify(
|
||||
rawContent2,
|
||||
mdToHtml,
|
||||
Metadata['en-doc2'],
|
||||
);
|
||||
expect(content2).toContain('/docs/en/next/');
|
||||
expect(content2).toMatchSnapshot();
|
||||
expect(content2).not.toEqual(rawContent2);
|
||||
});
|
||||
|
||||
test('transform link even in subdirectory', () => {
|
||||
const customMetadata = {
|
||||
'subdir-doc3': {
|
||||
id: 'subdir-doc3',
|
||||
title: 'Document 3',
|
||||
source: 'subdir/doc3.md',
|
||||
permalink: 'docs/subdir/doc3.html',
|
||||
language: 'en',
|
||||
},
|
||||
};
|
||||
const customMdToHtml = metadataUtils.mdToHtml(customMetadata, siteConfig);
|
||||
const content3 = docs.mdToHtmlify(
|
||||
rawContent3,
|
||||
customMdToHtml,
|
||||
customMetadata['subdir-doc3'],
|
||||
);
|
||||
expect(content3).toContain('/docs/subdir/doc3');
|
||||
expect(content3).not.toContain('subdir/doc3.md');
|
||||
expect(content3).toMatchSnapshot();
|
||||
expect(content3).not.toEqual(rawContent3);
|
||||
});
|
||||
|
||||
test('transforms reference links', () => {
|
||||
const contentRefLinks = docs.mdToHtmlify(
|
||||
rawContentRefLinks,
|
||||
mdToHtml,
|
||||
Metadata['en-reflinks'],
|
||||
);
|
||||
expect(contentRefLinks).toContain('/docs/en/next/');
|
||||
expect(contentRefLinks).toMatchSnapshot();
|
||||
expect(contentRefLinks).not.toEqual(rawContentRefLinks);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFile', () => {
|
||||
const fakeContent = {
|
||||
'website-1.x/translated_docs/ko/doc1.md': '이건 가짜 야',
|
||||
'website-1.x/versioned_docs/version-1.0.0/doc2.md':
|
||||
'Document 2 is not good',
|
||||
'website-1.x/translated_docs/ko/version-1.0.0/doc1.md':
|
||||
'이것은 오래된 가짜입니다.',
|
||||
'docs/doc1.md': 'Just another document',
|
||||
};
|
||||
fs.existsSync = jest.fn().mockReturnValue(true);
|
||||
fs.readFileSync = jest.fn().mockImplementation(file => {
|
||||
const fakePath = file.replace(
|
||||
process.cwd().replace(/website-1.x\/?$/, ''),
|
||||
'',
|
||||
);
|
||||
const normalizedPath = fakePath.replace(/\\/g, '/');
|
||||
return fakeContent[normalizedPath];
|
||||
});
|
||||
|
||||
test('docs does not exist', () => {
|
||||
const metadata = Metadata['en-doc1'];
|
||||
fs.existsSync.mockReturnValueOnce(null);
|
||||
expect(docs.getFile(metadata)).toBeNull();
|
||||
});
|
||||
|
||||
test('null/undefined metadata', () => {
|
||||
expect(docs.getFile(null)).toBeNull();
|
||||
expect(docs.getFile(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
test('translated docs', () => {
|
||||
const metadata = Metadata['ko-doc1'];
|
||||
expect(docs.getFile(metadata)).toEqual(
|
||||
fakeContent['website-1.x/translated_docs/ko/doc1.md'],
|
||||
);
|
||||
});
|
||||
|
||||
test('versioned docs', () => {
|
||||
const metadata = Metadata['en-version-1.0.0-doc2'];
|
||||
expect(docs.getFile(metadata)).toEqual(
|
||||
fakeContent['website-1.x/versioned_docs/version-1.0.0/doc2.md'],
|
||||
);
|
||||
});
|
||||
|
||||
test('translated & versioned docs', () => {
|
||||
const metadata = Metadata['ko-version-1.0.0-doc1'];
|
||||
expect(docs.getFile(metadata)).toEqual(
|
||||
fakeContent['website-1.x/translated_docs/ko/version-1.0.0/doc1.md'],
|
||||
);
|
||||
});
|
||||
|
||||
test('normal docs', () => {
|
||||
const metadata = Metadata['en-doc1'];
|
||||
expect(docs.getFile(metadata)).toEqual(fakeContent['docs/doc1.md']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('replaceAssetsLink', () => {
|
||||
test('transform document with valid assets link', () => {
|
||||
const content1 = replaceAssetsLink(rawContent1, '/docs');
|
||||
expect(content1).toMatchSnapshot();
|
||||
expect(content1).toContain('');
|
||||
expect(content1).toContain('');
|
||||
expect(content1).toContain('');
|
||||
expect(content1).toContain('');
|
||||
expect(content1).not.toContain('');
|
||||
expect(content1).not.toContain('');
|
||||
expect(content1).not.toContain('');
|
||||
expect(content1).not.toContain('');
|
||||
expect(content1).not.toEqual(rawContent1);
|
||||
});
|
||||
|
||||
test('does not transform document without valid assets link', () => {
|
||||
const content2 = replaceAssetsLink(rawContent2, '/docs');
|
||||
expect(content2).toMatchSnapshot();
|
||||
expect(content2).not.toContain('');
|
||||
expect(content2).not.toContain('');
|
||||
expect(content2).not.toContain('');
|
||||
expect(content2).toEqual(rawContent2);
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
process.chdir(originalCwd);
|
||||
});
|
|
@@ -0,0 +1,26 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

jest.mock('gaze');
jest.mock('../readMetadata.js');
jest.mock('tiny-lr');

// When running Jest the siteConfig import fails because siteConfig doesn't exist
// relative to the cwd of the tests. Rather than mocking out cwd just mock
// siteConfig virtually.
jest.mock(`${process.cwd()}/siteConfig.js`, () => jest.fn(), {virtual: true});

const liveReloadServer = require('../liveReloadServer.js');

describe('get reload script', () => {
  test('when server started, returns url with correct port', () => {
    const port = 1234;
    liveReloadServer.start(port);
    const expectedUrl = `http://localhost:${port}/livereload.js`;
    expect(liveReloadServer.getReloadScriptUrl()).toBe(expectedUrl);
  });
});
@@ -0,0 +1,66 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const readCategories = require('../readCategories');
const generalMetadata = require('./__fixtures__/metadata.js');
const subCategoryMetadata = require('./__fixtures__/metadata-subcategories.js');

const languages = [
  {
    enabled: true,
    name: 'English',
    tag: 'en',
  },
  {
    enabled: true,
    name: 'Foo',
    tag: 'ko',
  },
];

const languagesMultiple = [
  {
    enabled: false,
    name: 'English',
    tag: 'en',
  },
  {
    enabled: true,
    name: 'Foo',
    tag: 'ko',
  },
];

describe('readCategories', () => {
  test('should return proper categories and their pages', () => {
    const categories = readCategories('docs', generalMetadata, languages);

    expect(categories.en).toBeDefined();
    expect(categories.en.length).toBe(2);
    expect(categories.en).toMatchSnapshot();
  });

  test('should return proper data with categories and sub categories', () => {
    const categories = readCategories('docs', subCategoryMetadata, languages);

    expect(categories.en).toBeDefined();
    expect(categories.ko).toBeDefined();
    expect(categories.en.length).toBe(2);
    expect(categories.en).toMatchSnapshot();
  });

  test('should return proper languages when not enabled', () => {
    const categories = readCategories(
      'docs',
      generalMetadata,
      languagesMultiple,
    );

    expect(categories.en).not.toBeDefined();
    expect(categories.ko).toBeDefined();
  });
});
@@ -0,0 +1,43 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const {readSidebar} = require('../readMetadata');
const sidebarSubcategories = require('./__fixtures__/sidebar-subcategories');

jest.mock('../env', () => ({
  translation: {
    enabled: true,
    enabledLanguages: () => [
      {
        enabled: true,
        name: 'English',
        tag: 'en',
      },
      {
        enabled: true,
        name: '한국어',
        tag: 'ko',
      },
    ],
  },
  versioning: {
    enabled: true,
    defaultVersion: '1.0.0',
  },
}));

jest.mock(`${process.cwd()}/siteConfig.js`, () => ({}), {virtual: true});
jest.mock(`${process.cwd()}/sidebar.json`, () => true, {virtual: true});

describe('readMetadata', () => {
  describe('readSidebar', () => {
    test('should verify sub category data and verify order', () => {
      const items = readSidebar(sidebarSubcategories);
      expect(items).toMatchSnapshot();
    });
  });
});
187
packages/docusaurus-1.x/lib/server/__tests__/routing.test.js
Normal file
|
@ -0,0 +1,187 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
const routing = require('../routing.js');
|
||||
|
||||
describe('Blog routing', () => {
|
||||
const blogRegex = routing.blog({baseUrl: '/'});
|
||||
const blogRegex2 = routing.blog({baseUrl: '/react/'});
|
||||
|
||||
test('valid blog', () => {
|
||||
expect('/blog/test.html').toMatch(blogRegex);
|
||||
expect('/react/blog/test.html').toMatch(blogRegex2);
|
||||
});
|
||||
|
||||
test('invalid blog', () => {
|
||||
expect('/react/blog/test.html').not.toMatch(blogRegex);
|
||||
expect('/blog/test.html').not.toMatch(blogRegex2);
|
||||
});
|
||||
|
||||
test('assets not classified as blog', () => {
|
||||
expect('/blog/assets/any.png').not.toMatch(blogRegex);
|
||||
expect('/react/blog/assets/any.png').not.toMatch(blogRegex2);
|
||||
});
|
||||
|
||||
test('docs not classified as blog', () => {
|
||||
expect('/docs/en/blog.html').not.toMatch(blogRegex);
|
||||
expect('/docs/en/blog/blog.html').not.toMatch(blogRegex);
|
||||
expect('/react/docs/en/blog.html').not.toMatch(blogRegex2);
|
||||
expect('/react/docs/en/blog/blog.html').not.toMatch(blogRegex2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Docs routing', () => {
|
||||
const docsRegex = routing.docs({baseUrl: '/', docsUrl: 'docs'});
|
||||
const docsRegex2 = routing.docs({baseUrl: '/reason/', docsUrl: 'docs'});
|
||||
|
||||
test('valid docs', () => {
|
||||
expect('/docs/en/test.html').toMatch(docsRegex);
|
||||
expect('/reason/docs/en/test.html').toMatch(docsRegex2);
|
||||
});
|
||||
|
||||
test('invalid docs', () => {
|
||||
expect('/reason/docs/en/test.html').not.toMatch(docsRegex);
|
||||
expect('/docs/en/test.html').not.toMatch(docsRegex2);
|
||||
});
|
||||
|
||||
test('assets not classified as docs', () => {
|
||||
expect('/docs/en/notvalid.png').not.toMatch(docsRegex);
|
||||
expect('/reason/docs/en/notvalid.png').not.toMatch(docsRegex2);
|
||||
});
|
||||
|
||||
test('blog not classified as docs', () => {
|
||||
expect('/blog/docs.html').not.toMatch(docsRegex);
|
||||
expect('/blog/docs/docs.html').not.toMatch(docsRegex);
|
||||
expect('/reason/blog/docs.html').not.toMatch(docsRegex2);
|
||||
expect('/reason/blog/docs/docs.html').not.toMatch(docsRegex2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Dot routing', () => {
|
||||
const dotRegex = routing.dotfiles();
|
||||
|
||||
test('valid url with dot after last slash', () => {
|
||||
expect('/docs/en/test.23').toMatch(dotRegex);
|
||||
expect('/robots.hai.2').toMatch(dotRegex);
|
||||
expect('/blog/1.2.3').toMatch(dotRegex);
|
||||
expect('/this.is.my').toMatch(dotRegex);
|
||||
});
|
||||
|
||||
test('html file is invalid', () => {
|
||||
expect('/docs/en.html').not.toMatch(dotRegex);
|
||||
expect('/users.html').not.toMatch(dotRegex);
|
||||
expect('/blog/asdf.html').not.toMatch(dotRegex);
|
||||
expect('/end/1234/asdf.html').not.toMatch(dotRegex);
|
||||
expect('/test/lol.huam.html').not.toMatch(dotRegex);
|
||||
});
|
||||
|
||||
test('extension-less url is not valid', () => {
|
||||
expect('/reason/test').not.toMatch(dotRegex);
|
||||
expect('/asdff').not.toMatch(dotRegex);
|
||||
expect('/blog/asdf.ghg/').not.toMatch(dotRegex);
|
||||
expect('/end/1234.23.55/').not.toMatch(dotRegex);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Feed routing', () => {
|
||||
const feedRegex = routing.feed({baseUrl: '/'});
|
||||
const feedRegex2 = routing.feed({baseUrl: '/reason/'});
|
||||
|
||||
test('valid feed url', () => {
|
||||
expect('/blog/atom.xml').toMatch(feedRegex);
|
||||
expect('/blog/feed.xml').toMatch(feedRegex);
|
||||
expect('/reason/blog/atom.xml').toMatch(feedRegex2);
|
||||
expect('/reason/blog/feed.xml').toMatch(feedRegex2);
|
||||
});
|
||||
|
||||
test('invalid feed url', () => {
|
||||
expect('/blog/blog/feed.xml').not.toMatch(feedRegex);
|
||||
expect('/blog/test.xml').not.toMatch(feedRegex);
|
||||
expect('/reason/blog/atom.xml').not.toMatch(feedRegex);
|
||||
expect('/reason/blog/feed.xml').not.toMatch(feedRegex);
|
||||
expect('/blog/feed.xml/test.html').not.toMatch(feedRegex);
|
||||
expect('/blog/atom.xml').not.toMatch(feedRegex2);
|
||||
expect('/blog/feed.xml').not.toMatch(feedRegex2);
|
||||
expect('/reason/blog/test.xml').not.toMatch(feedRegex2);
|
||||
expect('/reason/blog/blog/feed.xml').not.toMatch(feedRegex2);
|
||||
expect('/reason/blog/blog/atom.xml').not.toMatch(feedRegex2);
|
||||
});
|
||||
|
||||
test('not a feed', () => {
|
||||
expect('/blog/atom').not.toMatch(feedRegex);
|
||||
expect('/reason/blog/feed').not.toMatch(feedRegex2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Extension-less url routing', () => {
|
||||
const noExtRegex = routing.noExtension();
|
||||
|
||||
test('valid no extension url', () => {
|
||||
expect('/test').toMatch(noExtRegex);
|
||||
expect('/reason/test').toMatch(noExtRegex);
|
||||
});
|
||||
|
||||
test('url with file extension', () => {
|
||||
expect('/robots.txt').not.toMatch(noExtRegex);
|
||||
expect('/reason/robots.txt').not.toMatch(noExtRegex);
|
||||
expect('/docs/en/docu.html').not.toMatch(noExtRegex);
|
||||
expect('/reason/robots.html').not.toMatch(noExtRegex);
|
||||
expect('/blog/atom.xml').not.toMatch(noExtRegex);
|
||||
expect('/reason/sitemap.xml').not.toMatch(noExtRegex);
|
||||
expect('/main.css').not.toMatch(noExtRegex);
|
||||
expect('/reason/custom.css').not.toMatch(noExtRegex);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Page routing', () => {
|
||||
const pageRegex = routing.page({baseUrl: '/', docsUrl: 'docs'});
|
||||
const pageRegex2 = routing.page({baseUrl: '/reason/', docsUrl: 'docs'});
|
||||
|
||||
test('valid page url', () => {
|
||||
expect('/index.html').toMatch(pageRegex);
|
||||
expect('/en/help.html').toMatch(pageRegex);
|
||||
expect('/reason/index.html').toMatch(pageRegex2);
|
||||
expect('/reason/ro/users.html').toMatch(pageRegex2);
|
||||
});
|
||||
|
||||
test('docs not considered as page', () => {
|
||||
expect('/docs/en/test.html').not.toMatch(pageRegex);
|
||||
expect('/reason/docs/en/test.html').not.toMatch(pageRegex2);
|
||||
});
|
||||
|
||||
test('blog not considered as page', () => {
|
||||
expect('/blog/index.html').not.toMatch(pageRegex);
|
||||
expect('/reason/blog/index.html').not.toMatch(pageRegex2);
|
||||
});
|
||||
|
||||
test('not a page', () => {
|
||||
expect('/yangshun.jpg').not.toMatch(pageRegex);
|
||||
expect('/reason/endilie.png').not.toMatch(pageRegex2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Sitemap routing', () => {
|
||||
const sitemapRegex = routing.sitemap({baseUrl: '/'});
|
||||
const sitemapRegex2 = routing.sitemap({baseUrl: '/reason/'});
|
||||
|
||||
test('valid sitemap url', () => {
|
||||
expect('/sitemap.xml').toMatch(sitemapRegex);
|
||||
expect('/reason/sitemap.xml').toMatch(sitemapRegex2);
|
||||
});
|
||||
|
||||
test('invalid sitemap url', () => {
|
||||
expect('/reason/sitemap.xml').not.toMatch(sitemapRegex);
|
||||
expect('/reason/sitemap.xml.html').not.toMatch(sitemapRegex);
|
||||
expect('/sitemap/sitemap.xml').not.toMatch(sitemapRegex);
|
||||
expect('/reason/sitemap/sitemap.xml').not.toMatch(sitemapRegex);
|
||||
expect('/sitemap.xml').not.toMatch(sitemapRegex2);
|
||||
});
|
||||
|
||||
test('not a sitemap', () => {
|
||||
expect('/sitemap').not.toMatch(sitemapRegex);
|
||||
expect('/reason/sitemap').not.toMatch(sitemapRegex2);
|
||||
});
|
||||
});
|
138
packages/docusaurus-1.x/lib/server/__tests__/start.test.js
Normal file
|
@ -0,0 +1,138 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
const program = require('commander');
|
||||
const openBrowser = require('react-dev-utils/openBrowser');
|
||||
const portFinder = require('portfinder');
|
||||
const liveReloadServer = require('../liveReloadServer.js');
|
||||
const server = require('../server.js');
|
||||
|
||||
const siteConfig = require(`${process.cwd()}/siteConfig.js`);
|
||||
|
||||
// When running Jest the siteConfig import fails because siteConfig doesn't exist
|
||||
// relative to the cwd of the tests. Rather than mocking out cwd just mock
|
||||
// siteConfig virtually.
|
||||
jest.mock(`${process.cwd()}/siteConfig.js`, () => jest.fn(), {virtual: true});
|
||||
|
||||
jest.mock('commander');
|
||||
jest.mock('react-dev-utils/openBrowser');
|
||||
jest.mock('portfinder');
|
||||
jest.mock('../liveReloadServer.js');
|
||||
jest.mock('../server.js');
|
||||
jest.mock('process');
|
||||
|
||||
console.log = jest.fn();
|
||||
|
||||
const start = require('../start.js');
|
||||
|
||||
beforeEach(() => jest.resetAllMocks());
|
||||
|
||||
describe('start live reload', () => {
|
||||
test('uses inital port 35729', () => {
|
||||
portFinder.getPortPromise.mockResolvedValue();
|
||||
start.startLiveReloadServer();
|
||||
expect(portFinder.getPortPromise).toHaveBeenCalledWith({port: 35729});
|
||||
});
|
||||
|
||||
test('when an unused port is found, starts the live reload server on that port', () => {
|
||||
expect.assertions(1);
|
||||
const unusedPort = 1234;
|
||||
portFinder.getPortPromise.mockResolvedValue(unusedPort);
|
||||
return start.startLiveReloadServer().then(() => {
|
||||
expect(liveReloadServer.start).toHaveBeenCalledWith(unusedPort);
|
||||
});
|
||||
});
|
||||
|
||||
test('when no unused port found, returns error', () => {
|
||||
expect.assertions(1);
|
||||
const unusedPortError = new Error('no unused port');
|
||||
portFinder.getPortPromise.mockRejectedValue(unusedPortError);
|
||||
return expect(start.startLiveReloadServer()).rejects.toEqual(
|
||||
unusedPortError,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('start server', () => {
|
||||
test('when custom port provided as parameter, uses as inital port', () => {
|
||||
const customPort = 1234;
|
||||
program.port = customPort;
|
||||
portFinder.getPortPromise.mockResolvedValue();
|
||||
start.startServer();
|
||||
expect(portFinder.getPortPromise).toBeCalledWith({port: customPort});
|
||||
delete program.port;
|
||||
});
|
||||
|
||||
test('when port environment variable set and no custom port, used as inital port', () => {
|
||||
const customPort = '4321';
|
||||
process.env.PORT = customPort;
|
||||
portFinder.getPortPromise.mockResolvedValue();
|
||||
start.startServer();
|
||||
expect(portFinder.getPortPromise).toBeCalledWith({port: customPort});
|
||||
delete process.env.PORT;
|
||||
});
|
||||
|
||||
test('when no custom port specified, uses port 3000', () => {
|
||||
portFinder.getPortPromise.mockResolvedValue();
|
||||
start.startServer();
|
||||
expect(portFinder.getPortPromise).toBeCalledWith({port: 3000});
|
||||
});
|
||||
|
||||
test('when unused port found, starts server on that port', () => {
|
||||
expect.assertions(1);
|
||||
const port = 1357;
|
||||
portFinder.getPortPromise.mockResolvedValue(port);
|
||||
return start.startServer().then(() => {
|
||||
expect(server).toHaveBeenCalledWith(port);
|
||||
});
|
||||
});
|
||||
|
||||
test('when unused port found, opens browser to server address', () => {
|
||||
expect.assertions(1);
|
||||
const baseUrl = '/base_url';
|
||||
siteConfig.baseUrl = baseUrl;
|
||||
const port = 2468;
|
||||
portFinder.getPortPromise.mockResolvedValue(port);
|
||||
const expectedServerAddress = `http://localhost:${port}${baseUrl}`;
|
||||
return start.startServer().then(() => {
|
||||
expect(openBrowser).toHaveBeenCalledWith(expectedServerAddress);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('start docusaurus', () => {
|
||||
test('when watch enabled, starts live reload server', () => {
|
||||
expect.assertions(1);
|
||||
program.watch = true;
|
||||
portFinder.getPortPromise.mockResolvedValue();
|
||||
return start.startDocusaurus().then(() => {
|
||||
expect(liveReloadServer.start).toBeCalled();
|
||||
});
|
||||
});
|
||||
|
||||
test('when live reload fails to start, server still started', () => {
|
||||
expect.assertions(1);
|
||||
program.watch = true;
|
||||
console.warn = jest.fn();
|
||||
portFinder.getPortPromise
|
||||
.mockRejectedValueOnce('could not find live reload port')
|
||||
.mockResolvedValueOnce();
|
||||
return start.startDocusaurus().then(() => {
|
||||
expect(server).toBeCalled();
|
||||
});
|
||||
});
|
||||
|
||||
test('live reload disabled, only starts docusarus server', () => {
|
||||
expect.assertions(2);
|
||||
program.watch = false;
|
||||
portFinder.getPortPromise.mockResolvedValue();
|
||||
return start.startDocusaurus().then(() => {
|
||||
expect(liveReloadServer.start).not.toBeCalled();
|
||||
expect(server).toBeCalled();
|
||||
});
|
||||
});
|
||||
});
|
91
packages/docusaurus-1.x/lib/server/__tests__/utils.test.js
Normal file
|
@ -0,0 +1,91 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const utils = require('../utils');
|
||||
|
||||
jest.mock('../env', () => ({
|
||||
translation: {
|
||||
enabled: true,
|
||||
enabledLanguages: () => [
|
||||
{
|
||||
enabled: true,
|
||||
name: 'English',
|
||||
tag: 'en',
|
||||
},
|
||||
{
|
||||
enabled: true,
|
||||
name: '日本語',
|
||||
tag: 'ja',
|
||||
},
|
||||
],
|
||||
},
|
||||
}));
|
||||
|
||||
describe('server utils', () => {
|
||||
test('minify css', () => {
|
||||
const testCss = fs.readFileSync(
|
||||
path.join(__dirname, '__fixtures__', 'test.css'),
|
||||
'utf8',
|
||||
);
|
||||
const notCss = fs.readFileSync(
|
||||
path.join(__dirname, '__fixtures__', 'test.md'),
|
||||
'utf8',
|
||||
);
|
||||
utils.minifyCss(testCss).then(css => expect(css).toMatchSnapshot());
|
||||
utils.minifyCss(notCss).catch(e => expect(e).toMatchSnapshot());
|
||||
});
|
||||
|
||||
test('autoprefix css', () => {
|
||||
const testCss = fs.readFileSync(
|
||||
path.join(__dirname, '__fixtures__', 'test.css'),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
utils.autoPrefixCss(testCss).then(css => expect(css).toMatchSnapshot());
|
||||
});
|
||||
|
||||
test('getLanguage', () => {
|
||||
const testDocEnglish = path.join('translated_docs', 'en', 'test.md');
|
||||
const testDocJapanese = path.join('translated_docs', 'ja', 'test.md');
|
||||
const testDocJapaneseInSubfolder = path.join(
|
||||
'translated_docs',
|
||||
'ja',
|
||||
'en',
|
||||
'test.md',
|
||||
);
|
||||
const testDocInSubfolder = path.join('docs', 'ro', 'test.md');
|
||||
const testDocNoLanguage = path.join('docs', 'test.md');
|
||||
expect(utils.getLanguage(testDocEnglish, 'translated_docs')).toBe('en');
|
||||
expect(utils.getLanguage(testDocJapanese, 'translated_docs')).toBe('ja');
|
||||
expect(
|
||||
utils.getLanguage(testDocJapaneseInSubfolder, 'translated_docs'),
|
||||
).toBe('ja');
|
||||
expect(utils.getLanguage(testDocInSubfolder, 'docs')).toBeNull();
|
||||
expect(utils.getLanguage(testDocNoLanguage, 'docs')).toBeNull();
|
||||
});
|
||||
|
||||
test('getSubdir', () => {
|
||||
const docA = path.join('docs', 'endiliey', 'a.md');
|
||||
const docB = path.join('docs', 'nus', 'hackers', 'b.md');
|
||||
const docC = path.join('docs', 'c.md');
|
||||
const docD = path.join('website', 'translated_docs', 'wow', 'd.md');
|
||||
const docE = path.join('website', 'translated_docs', 'lol', 'lah', 'e.md');
|
||||
const docsDir = path.join('docs');
|
||||
const translatedDir = path.join('website', 'translated_docs');
|
||||
expect(utils.getSubDir(docA, docsDir)).toEqual('endiliey');
|
||||
expect(utils.getSubDir(docA, translatedDir)).toBeNull();
|
||||
expect(utils.getSubDir(docB, docsDir)).toEqual('nus/hackers');
|
||||
expect(utils.getSubDir(docB, translatedDir)).toBeNull();
|
||||
expect(utils.getSubDir(docC, docsDir)).toBeNull();
|
||||
expect(utils.getSubDir(docC, translatedDir)).toBeNull();
|
||||
expect(utils.getSubDir(docD, docsDir)).toBeNull();
|
||||
expect(utils.getSubDir(docD, translatedDir)).toEqual('wow');
|
||||
expect(utils.getSubDir(docE, docsDir)).toBeNull();
|
||||
expect(utils.getSubDir(docE, translatedDir)).toEqual('lol/lah');
|
||||
});
|
||||
});
|
97
packages/docusaurus-1.x/lib/server/blog.js
Normal file
@@ -0,0 +1,97 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
const CWD = process.cwd();
const React = require('react');
const path = require('path');
const fs = require('fs-extra');
const metadataUtils = require('./metadataUtils');
const {replaceAssetsLink} = require('./utils.js');
const {renderToStaticMarkupWithDoctype} = require('./renderUtils');
const loadConfig = require('./config');

const siteConfig = loadConfig(`${CWD}/siteConfig.js`);

function urlToSource(url) {
  if (!url || typeof url !== 'string') {
    return null;
  }
  return url
    .replace(/\/index.html$/, '.md')
    .replace(/\.html$/, '.md')
    .replace(new RegExp('/', 'g'), '-');
}

function fileToUrl(file) {
  if (!file || !fs.existsSync(file) || typeof file !== 'string') {
    return null;
  }
  return path
    .basename(file)
    .replace('-', '/')
    .replace('-', '/')
    .replace('-', '/')
    .replace(/\.md$/, '.html');
}

function getPagesMarkup(numOfBlog, config) {
  const BlogPageLayout = require('../core/BlogPageLayout.js');
  const blogPages = {};
  const perPage = 10;
  for (let page = 0; page < Math.ceil(numOfBlog / perPage); page++) {
    const metadata = {page, perPage};
    const blogPageComp = (
      <BlogPageLayout metadata={metadata} language="en" config={config} />
    );
    const str = renderToStaticMarkupWithDoctype(blogPageComp);
    const pagePath = `${page > 0 ? `page${page + 1}` : ''}/index.html`;
    blogPages[pagePath] = str;
  }
  return blogPages;
}

function getMetadata(file) {
  if (!file || !fs.existsSync(file)) {
    return null;
  }
  const result = metadataUtils.extractMetadata(
    fs.readFileSync(file, {encoding: 'utf8'}),
  );
  const metadata = Object.assign(
    {
      path: fileToUrl(file),
      content: replaceAssetsLink(
        result.rawContent,
        `${siteConfig.baseUrl}blog`,
      ),
    },
    result.metadata,
  );
  metadata.id = metadata.title;
  return metadata;
}

function getPostMarkup(file, config) {
  const metadata = getMetadata(file);
  if (!metadata) {
    return null;
  }
  const BlogPostLayout = require('../core/BlogPostLayout.js');
  const blogPostComp = (
    <BlogPostLayout metadata={metadata} language="en" config={config}>
      {metadata.content}
    </BlogPostLayout>
  );
  return renderToStaticMarkupWithDoctype(blogPostComp);
}

module.exports = {
  fileToUrl,
  getMetadata,
  getPagesMarkup,
  getPostMarkup,
  urlToSource,
};
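For orientation, a minimal sketch of how these two helpers map between blog file names and URLs; the file name below is made up for illustration, and fileToUrl additionally requires the file to exist on disk:

```js
const blog = require('./blog');

// Only the first three dashes of the base name become slashes.
blog.urlToSource('2017/12/14/introducing-docusaurus/index.html');
// => '2017-12-14-introducing-docusaurus.md'

// The reverse direction, assuming the .md file exists on disk:
// blog.fileToUrl('blog/2017-12-14-introducing-docusaurus.md')
// => '2017/12/14/introducing-docusaurus.html'
```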
31
packages/docusaurus-1.x/lib/server/config.js
Normal file
@@ -0,0 +1,31 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const fs = require('fs-extra');

module.exports = function loadConfig(configPath, deleteCache = true) {
  if (deleteCache) {
    delete require.cache[configPath];
  }
  let config = {};
  if (fs.existsSync(configPath)) {
    config = require(configPath); // eslint-disable-line
  }

  /* Fill default value */
  const defaultConfig = {
    customDocsPath: 'docs',
    docsUrl: 'docs',
  };
  Object.keys(defaultConfig).forEach(field => {
    if (!(field in config)) {
      config[field] = defaultConfig[field];
    }
  });

  return config;
};
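A quick sketch of the default-filling behaviour above, assuming it is run from a website directory whose siteConfig.js does not set these fields:

```js
const loadConfig = require('./config');

// The loader fills in any missing defaults.
const config = loadConfig(`${process.cwd()}/siteConfig.js`);
console.log(config.customDocsPath); // 'docs' unless the site overrides it
console.log(config.docsUrl); // 'docs' unless the site overrides it
```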
158
packages/docusaurus-1.x/lib/server/docs.js
Normal file
@@ -0,0 +1,158 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
const CWD = process.cwd();
const {join} = require('path');
const {resolve} = require('url');
const fs = require('fs-extra');
const React = require('react');
const loadConfig = require('./config');

const siteConfig = loadConfig(`${CWD}/siteConfig.js`);
const env = require('./env.js');
const {renderToStaticMarkupWithDoctype} = require('./renderUtils');
const readMetadata = require('./readMetadata.js');
const {insertTOC} = require('../core/toc.js');
const {replaceAssetsLink} = require('./utils.js');
const {getPath} = require('../core/utils.js');

const docsPart = `${siteConfig.docsUrl ? `${siteConfig.docsUrl}/` : ''}`;

function getFilePath(metadata) {
  if (!metadata) {
    return null;
  }
  let file;
  if (env.versioning.enabled && metadata.original_id) {
    if (env.translation.enabled && metadata.language !== 'en') {
      file = join(CWD, 'translated_docs', metadata.language, metadata.source);
    } else {
      file = join(CWD, 'versioned_docs', metadata.source);
    }
  } else if (env.translation.enabled && metadata.language !== 'en') {
    file = join(CWD, 'translated_docs', metadata.language, metadata.source);
  } else {
    file = join(CWD, '..', readMetadata.getDocsPath(), metadata.source);
  }
  return file;
}

function getFile(metadata) {
  if (!metadata) {
    return null;
  }
  const file = getFilePath(metadata);
  if (!fs.existsSync(file)) {
    return null;
  }
  return fs.readFileSync(file, 'utf8');
}

function mdToHtmlify(oldContent, mdToHtml, metadata) {
  /* Store broken links */
  const mdBrokenLinks = [];

  let content = oldContent;
  /* Replace internal markdown linking (except in fenced blocks) */
  let fencedBlock = false;
  const lines = content.split('\n').map(line => {
    if (line.trim().startsWith('```')) {
      fencedBlock = !fencedBlock;
    }
    if (fencedBlock) return line;

    let modifiedLine = line;
    /* Replace inline-style links or reference-style links e.g:
    This is [Document 1](doc1.md) -> we replace this doc1.md with correct link
    [doc1]: doc1.md -> we replace this doc1.md with correct link
    */
    const mdRegex = /(?:(?:\]\()|(?:\]:\s?))(?!https)([^'")\]\s>]+\.md)/g;
    let mdMatch = mdRegex.exec(modifiedLine);
    while (mdMatch !== null) {
      /* Replace it to correct html link */
      const docsSource = metadata.version
        ? metadata.source.replace(/version-.*?\//, '')
        : metadata.source;
      let htmlLink =
        mdToHtml[resolve(docsSource, mdMatch[1])] || mdToHtml[mdMatch[1]];
      if (htmlLink) {
        htmlLink = getPath(htmlLink, siteConfig.cleanUrl);
        htmlLink = htmlLink.replace('/en/', `/${metadata.language}/`);
        htmlLink = htmlLink.replace(
          '/VERSION/',
          metadata.version && metadata.version !== env.versioning.latestVersion
            ? `/${metadata.version}/`
            : '/',
        );
        modifiedLine = modifiedLine.replace(mdMatch[1], htmlLink);
      } else {
        mdBrokenLinks.push(mdMatch[1]);
      }
      mdMatch = mdRegex.exec(modifiedLine);
    }
    return modifiedLine;
  });
  content = lines.join('\n');

  if (mdBrokenLinks.length) {
    console.log(
      `[WARN] unresolved links in file '${metadata.source}' >`,
      mdBrokenLinks,
    );
  }
  return content;
}

function getMarkup(rawContent, mdToHtml, metadata) {
  // generate table of contents
  let content = insertTOC(rawContent);

  // replace any links to markdown files to their website html links
  content = mdToHtmlify(content, mdToHtml, metadata);

  // replace any relative links to static assets (not in fenced code blocks) to absolute links
  const docsAssetsLocation = siteConfig.docsUrl
    ? `${siteConfig.baseUrl}${siteConfig.docsUrl}`
    : siteConfig.baseUrl.substring(0, siteConfig.baseUrl.length - 1);
  content = replaceAssetsLink(content, docsAssetsLocation);

  const DocsLayout = require('../core/DocsLayout.js');
  return renderToStaticMarkupWithDoctype(
    <DocsLayout
      metadata={metadata}
      language={metadata.language}
      config={siteConfig}>
      {content}
    </DocsLayout>,
  );
}

function getRedirectMarkup(metadata) {
  if (
    !env.translation.enabled ||
    !metadata.permalink.includes(`${docsPart}en`)
  ) {
    return null;
  }
  const Redirect = require('../core/Redirect.js');
  const redirectlink = getPath(metadata.permalink, siteConfig.cleanUrl);
  return renderToStaticMarkupWithDoctype(
    <Redirect
      metadata={metadata}
      language={metadata.language}
      config={siteConfig}
      redirect={siteConfig.baseUrl + redirectlink}
    />,
  );
}

module.exports = {
  getMarkup,
  getFile,
  getFilePath,
  getRedirectMarkup,
  mdToHtmlify,
};
85
packages/docusaurus-1.x/lib/server/env.js
Normal file
@@ -0,0 +1,85 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const CWD = process.cwd();
const fs = require('fs-extra');
const path = require('path');
const chalk = require('chalk');

const siteConfig = require(`${CWD}/siteConfig.js`);

const join = path.join;

const languagesFile = join(CWD, 'languages.js');
const versionsJSONFile = join(CWD, 'versions.json');
const versionsFile = join(CWD, 'pages/en/versions.js');

class Translation {
  constructor() {
    this.enabled = false;
    this.languages = [
      {
        enabled: true,
        name: 'English',
        tag: 'en',
      },
    ];

    this.load();
  }

  enabledLanguages = () => this.languages.filter(lang => lang.enabled);

  load() {
    if (fs.existsSync(languagesFile)) {
      this.enabled = true;
      this.languages = require(languagesFile);
    }
  }
}

class Versioning {
  constructor() {
    this.enabled = false;
    this.latestVersion = null;
    this.defaultVersion = null;
    this.versions = [];
    this.missingVersionsPage = false;

    this.load();
  }

  printMissingVersionsPageError() {
    console.error(
      `${chalk.yellow('No versions.js file found!')}` +
        `\nYou should create your versions.js file in pages/en directory.` +
        `\nPlease refer to https://docusaurus.io/docs/en/versioning.html.`,
    );
  }

  load() {
    if (fs.existsSync(versionsJSONFile)) {
      this.enabled = true;
      this.versions = JSON.parse(fs.readFileSync(versionsJSONFile, 'utf8'));
      this.latestVersion = this.versions[0];
      this.defaultVersion = siteConfig.defaultVersionShown
        ? siteConfig.defaultVersionShown
        : this.latestVersion; // otherwise show the latest version (other than next/master)
    }

    if (!fs.existsSync(versionsFile)) {
      this.missingVersionsPage = true;
    }
  }
}

const env = {
  translation: new Translation(),
  versioning: new Versioning(),
};

module.exports = env;
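With neither languages.js nor versions.json in the website directory, the defaults defined above apply; a rough sketch (assumes a siteConfig.js is present, since the module requires it):

```js
const env = require('./env.js');

console.log(env.translation.enabled); // false without a languages.js file
console.log(env.translation.enabledLanguages());
// => [{enabled: true, name: 'English', tag: 'en'}]
console.log(env.versioning.enabled); // false without a versions.json file
```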
65
packages/docusaurus-1.x/lib/server/feed.js
Normal file
@@ -0,0 +1,65 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const Feed = require('feed');
const truncateHtml = require('truncate-html');

const BLOG_POST_SUMMARY_LENGTH = 250;

const CWD = process.cwd();
const siteConfig = require(`${CWD}/siteConfig.js`);
const readMetadata = require('./readMetadata.js');

const blogRootURL = `${siteConfig.url + siteConfig.baseUrl}blog`;
const siteImageURL =
  siteConfig.url + siteConfig.baseUrl + siteConfig.headerIcon;
const utils = require('../core/utils');

const renderMarkdown = require('../core/renderMarkdown.js');

module.exports = function(type) {
  console.log('feed.js triggered...');

  type = type || 'rss';

  readMetadata.generateMetadataBlog();
  const MetadataBlog = require('../core/MetadataBlog.js');

  const feed = new Feed({
    title: `${siteConfig.title} Blog`,
    description: `The best place to stay up-to-date with the latest ${
      siteConfig.title
    } news and events.`,
    id: blogRootURL,
    link: blogRootURL,
    image: siteImageURL,
    copyright: siteConfig.copyright,
    updated: new Date(MetadataBlog[0].date),
  });

  MetadataBlog.forEach(post => {
    const url = `${blogRootURL}/${post.path}`;
    const description = utils.blogPostHasTruncateMarker(post.content)
      ? renderMarkdown(utils.extractBlogPostBeforeTruncate(post.content))
      : truncateHtml(renderMarkdown(post.content), BLOG_POST_SUMMARY_LENGTH);

    feed.addItem({
      title: post.title,
      link: url,
      author: [
        {
          name: post.author,
          link: post.authorURL,
        },
      ],
      date: new Date(post.date),
      description,
    });
  });

  return type === 'rss' ? feed.rss2() : feed.atom1();
};
417
packages/docusaurus-1.x/lib/server/generate.js
Normal file
|
@@ -0,0 +1,417 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
async function execute() {
|
||||
require('../write-translations.js');
|
||||
const metadataUtils = require('./metadataUtils');
|
||||
const blog = require('./blog');
|
||||
const docs = require('./docs');
|
||||
const CWD = process.cwd();
|
||||
const fs = require('fs-extra');
|
||||
const readMetadata = require('./readMetadata.js');
|
||||
const path = require('path');
|
||||
const {minifyCss, isSeparateCss, autoPrefixCss} = require('./utils');
|
||||
const React = require('react');
|
||||
const mkdirp = require('mkdirp');
|
||||
const glob = require('glob');
|
||||
const chalk = require('chalk');
|
||||
const Site = require('../core/Site.js');
|
||||
const env = require('./env.js');
|
||||
const loadConfig = require('./config.js');
|
||||
const siteConfig = loadConfig(`${CWD}/siteConfig.js`);
|
||||
const translate = require('./translate.js');
|
||||
const feed = require('./feed.js');
|
||||
const sitemap = require('./sitemap.js');
|
||||
const join = path.join;
|
||||
const sep = path.sep;
|
||||
const escapeStringRegexp = require('escape-string-regexp');
|
||||
const {renderToStaticMarkupWithDoctype} = require('./renderUtils');
|
||||
const commander = require('commander');
|
||||
const imagemin = require('imagemin');
|
||||
const imageminJpegtran = require('imagemin-jpegtran');
|
||||
const imageminOptipng = require('imagemin-optipng');
|
||||
const imageminSvgo = require('imagemin-svgo');
|
||||
const imageminGifsicle = require('imagemin-gifsicle');
|
||||
|
||||
commander.option('--skip-image-compression').parse(process.argv);
|
||||
|
||||
// create the folder path for a file if it does not exist, then write the file
|
||||
function writeFileAndCreateFolder(file, content) {
|
||||
mkdirp.sync(path.dirname(file));
|
||||
fs.writeFileSync(file, content);
|
||||
|
||||
// build extra file for extension-less url if "cleanUrl" siteConfig is true
|
||||
if (siteConfig.cleanUrl && file.indexOf('index.html') === -1) {
|
||||
const extraFile = file.replace(/\.html$/, '/index.html');
|
||||
mkdirp.sync(path.dirname(extraFile));
|
||||
fs.writeFileSync(extraFile, content);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('generate.js triggered...');
|
||||
|
||||
readMetadata.generateMetadataDocs();
|
||||
const Metadata = require('../core/metadata.js');
|
||||
|
||||
// TODO: what if the project is a github org page? We should not use
|
||||
// siteConfig.projectName in this case. Otherwise a GitHub org doc URL would
|
||||
// look weird: https://myorg.github.io/myorg/docs
|
||||
|
||||
// TODO: siteConfig.projectName is a misnomer. The actual project name is
|
||||
// `title`. `projectName` is only used to generate a folder, which isn't
|
||||
// needed when the project's a GitHub org page
|
||||
|
||||
const buildDir = join(CWD, 'build', siteConfig.projectName);
|
||||
fs.removeSync(join(CWD, 'build'));
|
||||
|
||||
// create html files for all docs by going through all doc ids
|
||||
const mdToHtml = metadataUtils.mdToHtml(Metadata, siteConfig);
|
||||
Object.keys(Metadata).forEach(id => {
|
||||
const metadata = Metadata[id];
|
||||
const file = docs.getFile(metadata);
|
||||
if (!file) {
|
||||
return;
|
||||
}
|
||||
const rawContent = metadataUtils.extractMetadata(file).rawContent;
|
||||
const str = docs.getMarkup(rawContent, mdToHtml, metadata);
|
||||
const targetFile = join(buildDir, metadata.permalink);
|
||||
writeFileAndCreateFolder(targetFile, str);
|
||||
|
||||
// generate english page redirects when languages are enabled
|
||||
const redirectMarkup = docs.getRedirectMarkup(metadata);
|
||||
if (!redirectMarkup) {
|
||||
return;
|
||||
}
|
||||
const docsPart = `${siteConfig.docsUrl ? `${siteConfig.docsUrl}/` : ''}`;
|
||||
const redirectFile = join(
|
||||
buildDir,
|
||||
metadata.permalink.replace(
|
||||
new RegExp(`^${docsPart}en`),
|
||||
siteConfig.docsUrl,
|
||||
),
|
||||
);
|
||||
writeFileAndCreateFolder(redirectFile, redirectMarkup);
|
||||
});
|
||||
|
||||
// copy docs assets if they exist
|
||||
if (fs.existsSync(join(CWD, '..', readMetadata.getDocsPath(), 'assets'))) {
|
||||
fs.copySync(
|
||||
join(CWD, '..', readMetadata.getDocsPath(), 'assets'),
|
||||
join(buildDir, siteConfig.docsUrl, 'assets'),
|
||||
);
|
||||
}
|
||||
|
||||
// create html files for all blog posts (each article)
|
||||
if (fs.existsSync(join(__dirname, '..', 'core', 'MetadataBlog.js'))) {
|
||||
fs.removeSync(join(__dirname, '..', 'core', 'MetadataBlog.js'));
|
||||
}
|
||||
readMetadata.generateMetadataBlog();
|
||||
const MetadataBlog = require('../core/MetadataBlog.js');
|
||||
|
||||
let files = glob.sync(join(CWD, 'blog', '**', '*.*'));
|
||||
files
|
||||
.sort()
|
||||
.reverse()
|
||||
.forEach(file => {
|
||||
// Why normalize? In case we are on Windows.
|
||||
// Remember the nuance of glob: https://www.npmjs.com/package/glob#windows
|
||||
const normalizedFile = path.normalize(file);
|
||||
const extension = path.extname(normalizedFile);
|
||||
if (extension !== '.md' && extension !== '.markdown') {
|
||||
return;
|
||||
}
|
||||
const urlPath = blog.fileToUrl(normalizedFile);
|
||||
const blogPost = blog.getPostMarkup(normalizedFile, siteConfig);
|
||||
if (!blogPost) {
|
||||
return;
|
||||
}
|
||||
const targetFile = join(buildDir, 'blog', urlPath);
|
||||
writeFileAndCreateFolder(targetFile, blogPost);
|
||||
});
|
||||
|
||||
// create html files for all blog pages (collections of article previews)
|
||||
const blogPages = blog.getPagesMarkup(MetadataBlog.length, siteConfig);
|
||||
Object.keys(blogPages).forEach(pagePath => {
|
||||
const targetFile = join(buildDir, 'blog', pagePath);
|
||||
writeFileAndCreateFolder(targetFile, blogPages[pagePath]);
|
||||
});
|
||||
|
||||
// create rss files for all blog pages, if there are any blog files
|
||||
if (MetadataBlog.length > 0) {
|
||||
let targetFile = join(buildDir, 'blog', 'feed.xml');
|
||||
writeFileAndCreateFolder(targetFile, feed());
|
||||
targetFile = join(buildDir, 'blog', 'atom.xml');
|
||||
writeFileAndCreateFolder(targetFile, feed('atom'));
|
||||
}
|
||||
|
||||
// create sitemap
|
||||
if (MetadataBlog.length > 0 || Object.keys(Metadata).length > 0) {
|
||||
sitemap((err, xml) => {
|
||||
if (!err) {
|
||||
const targetFile = join(buildDir, 'sitemap.xml');
|
||||
writeFileAndCreateFolder(targetFile, xml);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// copy blog assets if they exist
|
||||
if (fs.existsSync(join(CWD, 'blog', 'assets'))) {
|
||||
fs.copySync(join(CWD, 'blog', 'assets'), join(buildDir, 'blog', 'assets'));
|
||||
}
|
||||
|
||||
// copy all static files from docusaurus
|
||||
const libStaticDir = join(__dirname, '..', 'static');
|
||||
files = glob.sync(join(libStaticDir, '**'));
|
||||
files.forEach(file => {
|
||||
// Why normalize? In case we are on Windows.
|
||||
// Remember the nuance of glob: https://www.npmjs.com/package/glob#windows
|
||||
const targetFile = path.normalize(file).replace(libStaticDir, buildDir);
|
||||
// parse css files to replace colors according to siteConfig
|
||||
if (file.match(/\.css$/)) {
|
||||
let cssContent = fs.readFileSync(file, 'utf8');
|
||||
|
||||
if (
|
||||
!siteConfig.colors ||
|
||||
!siteConfig.colors.primaryColor ||
|
||||
!siteConfig.colors.secondaryColor
|
||||
) {
|
||||
console.error(
|
||||
`${chalk.yellow(
|
||||
'Missing color configuration.',
|
||||
)} Make sure siteConfig.colors includes primaryColor and secondaryColor fields.`,
|
||||
);
|
||||
}
|
||||
|
||||
Object.keys(siteConfig.colors).forEach(key => {
|
||||
const color = siteConfig.colors[key];
|
||||
cssContent = cssContent.replace(new RegExp(`\\$${key}`, 'g'), color);
|
||||
});
|
||||
|
||||
if (siteConfig.fonts) {
|
||||
Object.keys(siteConfig.fonts).forEach(key => {
|
||||
const fontString = siteConfig.fonts[key]
|
||||
.map(font => `"${font}"`)
|
||||
.join(', ');
|
||||
cssContent = cssContent.replace(
|
||||
new RegExp(`\\$${key}`, 'g'),
|
||||
fontString,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
mkdirp.sync(path.dirname(targetFile));
|
||||
fs.writeFileSync(targetFile, cssContent);
|
||||
} else if (!fs.lstatSync(file).isDirectory()) {
|
||||
mkdirp.sync(path.dirname(targetFile));
|
||||
fs.copySync(file, targetFile);
|
||||
}
|
||||
});
|
||||
|
||||
// Copy all static files from user.
|
||||
const userStaticDir = join(CWD, 'static');
|
||||
files = glob.sync(join(userStaticDir, '**'), {dot: true});
|
||||
files.forEach(file => {
|
||||
// Why normalize? In case we are on Windows.
|
||||
// Remember the nuance of glob: https://www.npmjs.com/package/glob#windows
|
||||
const normalizedFile = path.normalize(file);
|
||||
// parse css files to replace colors and fonts according to siteConfig
|
||||
if (
|
||||
normalizedFile.match(/\.css$/) &&
|
||||
!isSeparateCss(normalizedFile, siteConfig.separateCss)
|
||||
) {
|
||||
const mainCss = join(buildDir, 'css', 'main.css');
|
||||
let cssContent = fs.readFileSync(normalizedFile, 'utf8');
|
||||
cssContent = `${fs.readFileSync(mainCss, 'utf8')}\n${cssContent}`;
|
||||
|
||||
Object.keys(siteConfig.colors).forEach(key => {
|
||||
const color = siteConfig.colors[key];
|
||||
cssContent = cssContent.replace(new RegExp(`\\$${key}`, 'g'), color);
|
||||
});
|
||||
|
||||
if (siteConfig.fonts) {
|
||||
Object.keys(siteConfig.fonts).forEach(key => {
|
||||
const fontString = siteConfig.fonts[key]
|
||||
.map(font => `"${font}"`)
|
||||
.join(', ');
|
||||
cssContent = cssContent.replace(
|
||||
new RegExp(`\\$${key}`, 'g'),
|
||||
fontString,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
fs.writeFileSync(mainCss, cssContent);
|
||||
} else if (
|
||||
normalizedFile.match(/\.png$|.jpg$|.svg$|.gif$/) &&
|
||||
!commander.skipImageCompression
|
||||
) {
|
||||
const parts = normalizedFile.split(`${sep}static${sep}`);
|
||||
const targetFile = join(buildDir, parts[1]);
|
||||
const targetDirectory = path.dirname(targetFile);
|
||||
mkdirp.sync(targetDirectory);
|
||||
imagemin([normalizedFile], targetDirectory, {
|
||||
use: [
|
||||
imageminOptipng(),
|
||||
imageminJpegtran(),
|
||||
imageminSvgo({
|
||||
plugins: [{removeViewBox: false}],
|
||||
}),
|
||||
imageminGifsicle(),
|
||||
],
|
||||
}).catch(error => {
|
||||
// if image compression fail, just copy it as it is
|
||||
console.error(error);
|
||||
fs.copySync(normalizedFile, targetFile);
|
||||
});
|
||||
} else if (!fs.lstatSync(normalizedFile).isDirectory()) {
|
||||
const targetFile = normalizedFile.replace(userStaticDir, buildDir);
|
||||
mkdirp.sync(path.dirname(targetFile));
|
||||
fs.copySync(normalizedFile, targetFile);
|
||||
}
|
||||
});
|
||||
|
||||
// Use cssnano to minify the final combined CSS.
|
||||
// Use autoprefixer to add vendor prefixes
|
||||
const mainCss = join(buildDir, 'css', 'main.css');
|
||||
const cssContent = fs.readFileSync(mainCss, 'utf8');
|
||||
const minifiedCSS = await minifyCss(cssContent);
|
||||
const css = await autoPrefixCss(minifiedCSS);
|
||||
fs.writeFileSync(mainCss, css);
|
||||
|
||||
// compile/copy pages from user
|
||||
const enabledLanguages = env.translation
|
||||
.enabledLanguages()
|
||||
.map(lang => lang.tag);
|
||||
const userPagesDir = join(CWD, 'pages');
|
||||
files = glob.sync(join(userPagesDir, '**'));
|
||||
files.forEach(file => {
|
||||
// Why normalize? In case we are on Windows.
|
||||
// Remember the nuance of glob: https://www.npmjs.com/package/glob#windows
|
||||
const normalizedFile = path.normalize(file);
|
||||
const relativeFile = normalizedFile.replace(userPagesDir, '');
|
||||
// render .js files to strings
|
||||
if (normalizedFile.match(/\.js$/)) {
|
||||
const pageID = path.basename(normalizedFile, '.js');
|
||||
|
||||
// make temp file for sake of require paths
|
||||
let tempFile = join(__dirname, '..', 'pages', relativeFile);
|
||||
tempFile = tempFile.replace(
|
||||
path.basename(normalizedFile),
|
||||
`temp${path.basename(normalizedFile)}`,
|
||||
);
|
||||
mkdirp.sync(path.dirname(tempFile));
|
||||
fs.copySync(normalizedFile, tempFile);
|
||||
|
||||
const ReactComp = require(tempFile);
|
||||
|
||||
let targetFile = join(buildDir, relativeFile);
|
||||
targetFile = targetFile.replace(/\.js$/, '.html');
|
||||
|
||||
const regexLang = new RegExp(
|
||||
`${escapeStringRegexp(`${userPagesDir}${sep}`)}(.*)${escapeStringRegexp(
|
||||
sep,
|
||||
)}`,
|
||||
);
|
||||
const match = regexLang.exec(normalizedFile);
|
||||
const langParts = match[1].split(sep);
|
||||
if (langParts.indexOf('en') !== -1) {
|
||||
// Copy and compile a page for each enabled language from the English file.
|
||||
for (let i = 0; i < enabledLanguages.length; i++) {
|
||||
const language = enabledLanguages[i];
|
||||
// Skip conversion from English file if a file exists for this language.
|
||||
if (
|
||||
language === 'en' ||
|
||||
!fs.existsSync(
|
||||
normalizedFile.replace(`${sep}en${sep}`, sep + language + sep),
|
||||
)
|
||||
) {
|
||||
translate.setLanguage(language);
|
||||
const str = renderToStaticMarkupWithDoctype(
|
||||
<Site
|
||||
language={language}
|
||||
config={siteConfig}
|
||||
title={ReactComp.title}
|
||||
description={ReactComp.description}
|
||||
metadata={{id: pageID}}>
|
||||
<ReactComp config={siteConfig} language={language} />
|
||||
</Site>,
|
||||
);
|
||||
writeFileAndCreateFolder(
|
||||
// TODO: use path functions
|
||||
targetFile.replace(`${sep}en${sep}`, sep + language + sep),
|
||||
str,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// write to base level
|
||||
const language = env.translation.enabled ? 'en' : '';
|
||||
translate.setLanguage(language);
|
||||
const str = renderToStaticMarkupWithDoctype(
|
||||
<Site
|
||||
title={ReactComp.title}
|
||||
language={language}
|
||||
config={siteConfig}
|
||||
description={ReactComp.description}
|
||||
metadata={{id: pageID}}>
|
||||
<ReactComp config={siteConfig} language={language} />
|
||||
</Site>,
|
||||
);
|
||||
writeFileAndCreateFolder(
|
||||
targetFile.replace(`${sep}en${sep}`, sep),
|
||||
str,
|
||||
);
|
||||
} else {
|
||||
// allow for rendering of other files not in pages/en folder
|
||||
const language = env.translation.enabled ? 'en' : '';
|
||||
translate.setLanguage(language);
|
||||
const str = renderToStaticMarkupWithDoctype(
|
||||
<Site
|
||||
title={ReactComp.title}
|
||||
language={language}
|
||||
config={siteConfig}
|
||||
description={ReactComp.description}
|
||||
metadata={{id: pageID}}>
|
||||
<ReactComp config={siteConfig} language={language} />
|
||||
</Site>,
|
||||
);
|
||||
writeFileAndCreateFolder(
|
||||
targetFile.replace(`${sep}en${sep}`, sep),
|
||||
str,
|
||||
);
|
||||
}
|
||||
fs.removeSync(tempFile);
|
||||
} else if (siteConfig.wrapPagesHTML && normalizedFile.match(/\.html$/)) {
|
||||
const pageID = path.basename(normalizedFile, '.html');
|
||||
const targetFile = join(buildDir, relativeFile);
|
||||
const str = renderToStaticMarkupWithDoctype(
|
||||
<Site language="en" config={siteConfig} metadata={{id: pageID}}>
|
||||
<div
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: fs.readFileSync(normalizedFile, {encoding: 'utf8'}),
|
||||
}}
|
||||
/>
|
||||
</Site>,
|
||||
);
|
||||
|
||||
writeFileAndCreateFolder(targetFile, str);
|
||||
} else if (!fs.lstatSync(normalizedFile).isDirectory()) {
|
||||
// copy other non .js files
|
||||
const targetFile = join(buildDir, relativeFile);
|
||||
mkdirp.sync(path.dirname(targetFile));
|
||||
fs.copySync(normalizedFile, targetFile);
|
||||
}
|
||||
});
|
||||
|
||||
// Generate CNAME file if a custom domain is specified in siteConfig
|
||||
if (siteConfig.cname) {
|
||||
const targetFile = join(buildDir, 'CNAME');
|
||||
fs.writeFileSync(targetFile, siteConfig.cname);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = execute;
|
38
packages/docusaurus-1.x/lib/server/liveReloadServer.js
Normal file
@@ -0,0 +1,38 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const gaze = require('gaze');
const tinylr = require('tiny-lr');
const readMetadata = require('./readMetadata.js');

function start(port) {
  process.env.NODE_ENV = 'development';
  process.env.LIVERELOAD_PORT = port;
  const server = tinylr();
  server.listen(port, () => {
    console.log('LiveReload server started on port %d', port);
  });

  gaze(
    [`../${readMetadata.getDocsPath()}/**/*`, '**/*', '!node_modules/**/*'],
    function() {
      this.on('all', () => {
        server.notifyClients(['/']);
      });
    },
  );
}

const getReloadScriptUrl = () => {
  const port = process.env.LIVERELOAD_PORT;
  return `http://localhost:${port}/livereload.js`;
};

module.exports = {
  start,
  getReloadScriptUrl,
};
95
packages/docusaurus-1.x/lib/server/metadataUtils.js
Normal file
@@ -0,0 +1,95 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

// split markdown header
function splitHeader(content) {
  // New line characters need to handle all operating systems.
  const lines = content.split(/\r?\n/);
  if (lines[0] !== '---') {
    return {};
  }
  let i = 1;
  for (; i < lines.length - 1; ++i) {
    if (lines[i] === '---') {
      break;
    }
  }
  return {
    header: lines.slice(1, i + 1).join('\n'),
    content: lines.slice(i + 1).join('\n'),
  };
}

// Extract markdown metadata header
function extractMetadata(content) {
  const metadata = {};
  const both = splitHeader(content);

  // if no content returned, then that means there was no header, and both.header is the content
  if (!both.content) {
    if (!both.header) {
      // if no both returned, then that means there was no header and no content => we return the current content of the file
      return {metadata, rawContent: content};
    }
    return {metadata, rawContent: both.header};
  }

  // New line characters => to handle all operating systems.
  const lines = both.header.split(/\r?\n/);

  // Loop that add to metadata the current content of the fields of the header
  // Like the format:
  // id:
  // title:
  // original_id:
  for (let i = 0; i < lines.length - 1; ++i) {
    const keyvalue = lines[i].split(':');
    const key = keyvalue[0].trim();
    let value = keyvalue
      .slice(1)
      .join(':')
      .trim();
    try {
      value = JSON.parse(value);
    } catch (err) {
      // Ignore the error as it means it's not a JSON value.
    }
    metadata[key] = value;
  }
  return {metadata, rawContent: both.content};
}

// mdToHtml is a map from a markdown file name to its html link, used to
// change relative markdown links that work on GitHub into actual site links
function mdToHtml(Metadata, siteConfig) {
  const {baseUrl, docsUrl} = siteConfig;
  const result = {};
  Object.keys(Metadata).forEach(id => {
    const metadata = Metadata[id];
    if (metadata.language !== 'en' || metadata.original_id) {
      return;
    }
    let htmlLink = baseUrl + metadata.permalink.replace('/next/', '/');

    const baseDocsPart = `${baseUrl}${docsUrl ? `${docsUrl}/` : ''}`;

    const i18nDocsRegex = new RegExp(`^${baseDocsPart}en/`);
    const docsRegex = new RegExp(`^${baseDocsPart}`);
    if (i18nDocsRegex.test(htmlLink)) {
      htmlLink = htmlLink.replace(i18nDocsRegex, `${baseDocsPart}en/VERSION/`);
    } else {
      htmlLink = htmlLink.replace(docsRegex, `${baseDocsPart}VERSION/`);
    }
    result[metadata.source] = htmlLink;
  });
  return result;
}

module.exports = {
  extractMetadata,
  mdToHtml,
};
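A small illustration of extractMetadata on a made-up front-matter block:

```js
const {extractMetadata} = require('./metadataUtils');

const doc = [
  '---',
  'id: doc1',
  'title: Document 1',
  '---',
  '',
  'Docusaurus is the best :)',
].join('\n');
const {metadata, rawContent} = extractMetadata(doc);
console.log(metadata); // {id: 'doc1', title: 'Document 1'}
console.log(rawContent); // '\nDocusaurus is the best :)'
```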
88
packages/docusaurus-1.x/lib/server/readCategories.js
Normal file
@@ -0,0 +1,88 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const _ = require('lodash');

// returns data broken up into categories for a sidebar
function readCategories(sidebar, allMetadata, languages) {
  const allCategories = {};

  // Go through each language that might be defined.
  languages
    .filter(lang => lang.enabled)
    .map(lang => lang.tag)
    .forEach(language => {
      // Get all related metadata for the current sidebar and specific to the language.
      const metadatas = Object.values(allMetadata)
        .filter(
          metadata =>
            metadata.sidebar === sidebar && metadata.language === language,
        )
        .sort((a, b) => a.order - b.order);

      // Define the correct order of categories.
      const sortedCategories = _.uniq(
        metadatas.map(metadata => metadata.category),
      );

      const metadatasGroupedByCategory = _.chain(metadatas)
        .groupBy(metadata => metadata.category)
        .mapValues(categoryItems => {
          // Process subcategories.
          const metadatasGroupedBySubcategory = _.groupBy(
            categoryItems,
            item => item.subcategory,
          );
          const result = [];
          const seenSubcategories = new Set();
          // categoryItems can be links or subcategories. Handle separately.
          categoryItems.forEach(item => {
            // Has no subcategory.
            if (item.subcategory == null) {
              result.push({
                type: 'LINK',
                item,
              });
              return;
            }

            const {subcategory} = item;
            // Subcategory has been processed, we can skip it.
            if (seenSubcategories.has(subcategory)) {
              return;
            }

            seenSubcategories.add(subcategory);
            const subcategoryLinks = metadatasGroupedBySubcategory[
              subcategory
            ].map(subcategoryItem => ({
              type: 'LINK',
              item: subcategoryItem,
            }));
            result.push({
              type: 'SUBCATEGORY',
              title: subcategory,
              children: subcategoryLinks,
            });
          });

          return result;
        })
        .value();

      const categories = sortedCategories.map(category => ({
        type: 'CATEGORY',
        title: category,
        children: metadatasGroupedByCategory[category],
      }));
      allCategories[language] = categories;
    });

  return allCategories;
}

module.exports = readCategories;
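Roughly, given hypothetical metadata for a single 'docs' sidebar, the returned shape looks like this:

```js
const readCategories = require('./readCategories');

// Hypothetical metadata keyed by doc id, as readMetadata would produce it.
const allMetadata = {
  'en-intro': {sidebar: 'docs', language: 'en', category: 'Guides', subcategory: null, order: 1},
  'en-setup': {sidebar: 'docs', language: 'en', category: 'Guides', subcategory: 'Install', order: 2},
};
const categories = readCategories('docs', allMetadata, [{enabled: true, tag: 'en'}]);
// categories.en => [{type: 'CATEGORY', title: 'Guides', children: [
//   {type: 'LINK', item: /* en-intro */},
//   {type: 'SUBCATEGORY', title: 'Install', children: [{type: 'LINK', item: /* en-setup */}]},
// ]}]
```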
405
packages/docusaurus-1.x/lib/server/readMetadata.js
Normal file
|
@@ -0,0 +1,405 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
const CWD = process.cwd();
|
||||
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const glob = require('glob');
|
||||
|
||||
const metadataUtils = require('./metadataUtils');
|
||||
|
||||
const env = require('./env.js');
|
||||
const blog = require('./blog.js');
|
||||
|
||||
const loadConfig = require('./config');
|
||||
|
||||
const siteConfig = loadConfig(`${CWD}/siteConfig.js`);
|
||||
const versionFallback = require('./versionFallback.js');
|
||||
const utils = require('./utils.js');
|
||||
|
||||
const docsPart = `${siteConfig.docsUrl ? `${siteConfig.docsUrl}/` : ''}`;
|
||||
|
||||
const SupportedHeaderFields = new Set([
|
||||
'id',
|
||||
'title',
|
||||
'author',
|
||||
'authorURL',
|
||||
'authorFBID',
|
||||
'sidebar_label',
|
||||
'original_id',
|
||||
'hide_title',
|
||||
'layout',
|
||||
'custom_edit_url',
|
||||
]);
|
||||
|
||||
let allSidebars;
|
||||
if (fs.existsSync(`${CWD}/sidebars.json`)) {
|
||||
allSidebars = require(`${CWD}/sidebars.json`);
|
||||
} else {
|
||||
allSidebars = {};
|
||||
}
|
||||
|
||||
// Can have a custom docs path. Top level folder still needs to be in directory
|
||||
// at the same level as `website`, not inside `website`.
|
||||
// e.g., docs/whereDocsReallyExist
|
||||
// website-docs/
|
||||
// All .md docs still (currently) must be in one flat directory hierarchy.
|
||||
// e.g., docs/whereDocsReallyExist/*.md (all .md files in this dir)
|
||||
function getDocsPath() {
|
||||
return siteConfig.customDocsPath ? siteConfig.customDocsPath : 'docs';
|
||||
}
|
||||
|
||||
// returns map from id to object containing sidebar ordering info
|
||||
function readSidebar(sidebars = {}) {
|
||||
Object.assign(sidebars, versionFallback.sidebarData());
|
||||
|
||||
const items = {};
|
||||
|
||||
Object.keys(sidebars).forEach(sidebar => {
|
||||
const categories = sidebars[sidebar];
|
||||
const sidebarItems = [];
|
||||
|
||||
Object.keys(categories).forEach(category => {
|
||||
const categoryItems = categories[category];
|
||||
categoryItems.forEach(categoryItem => {
|
||||
if (typeof categoryItem === 'object') {
|
||||
switch (categoryItem.type) {
|
||||
case 'subcategory':
|
||||
categoryItem.ids.forEach(subcategoryItem => {
|
||||
sidebarItems.push({
|
||||
id: subcategoryItem,
|
||||
category,
|
||||
subcategory: categoryItem.label,
|
||||
order: sidebarItems.length + 1,
|
||||
});
|
||||
});
|
||||
return;
|
||||
default:
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Is a regular id value.
|
||||
sidebarItems.push({
|
||||
id: categoryItem,
|
||||
category,
|
||||
subcategory: null,
|
||||
order: sidebarItems.length + 1,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
for (let i = 0; i < sidebarItems.length; i++) {
|
||||
const item = sidebarItems[i];
|
||||
let previous = null;
|
||||
let next = null;
|
||||
|
||||
if (i > 0) {
|
||||
previous = sidebarItems[i - 1].id;
|
||||
}
|
||||
|
||||
if (i < sidebarItems.length - 1) {
|
||||
next = sidebarItems[i + 1].id;
|
||||
}
|
||||
|
||||
items[item.id] = {
|
||||
previous,
|
||||
next,
|
||||
sidebar,
|
||||
category: item.category,
|
||||
subcategory: item.subcategory,
|
||||
order: item.order,
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
return items;
|
||||
}
|
||||
|
||||
// process the metadata for a document found in either 'docs' or 'translated_docs'
|
||||
function processMetadata(file, refDir) {
|
||||
const result = metadataUtils.extractMetadata(fs.readFileSync(file, 'utf8'));
|
||||
const language = utils.getLanguage(file, refDir) || 'en';
|
||||
|
||||
const metadata = {};
|
||||
Object.keys(result.metadata).forEach(fieldName => {
|
||||
if (SupportedHeaderFields.has(fieldName)) {
|
||||
metadata[fieldName] = result.metadata[fieldName];
|
||||
} else {
|
||||
console.warn(`Header field "${fieldName}" in ${file} is not supported.`);
|
||||
}
|
||||
});
|
||||
|
||||
const rawContent = result.rawContent;
|
||||
|
||||
if (!metadata.id) {
|
||||
metadata.id = path.basename(file, path.extname(file));
|
||||
}
|
||||
if (metadata.id.includes('/')) {
|
||||
throw new Error('Document id cannot include "/".');
|
||||
}
|
||||
|
||||
// If a file is located in a subdirectory, prepend the subdir to it's ID
|
||||
// Example:
|
||||
// (file: 'docusaurus/docs/projectA/test.md', ID 'test', refDir: 'docusaurus/docs')
|
||||
// returns 'projectA/test'
|
||||
const subDir = utils.getSubDir(file, refDir);
|
||||
if (subDir) {
|
||||
metadata.id = `${subDir}/${metadata.id}`;
|
||||
}
|
||||
|
||||
// Example: `docs/projectA/test.md` source is `projectA/test.md`
|
||||
metadata.source = subDir
|
||||
? `${subDir}/${path.basename(file)}`
|
||||
: path.basename(file);
|
||||
|
||||
if (!metadata.title) {
|
||||
metadata.title = metadata.id;
|
||||
}
|
||||
|
||||
const langPart =
|
||||
env.translation.enabled || siteConfig.useEnglishUrl ? `${language}/` : '';
|
||||
let versionPart = '';
|
||||
if (env.versioning.enabled) {
|
||||
metadata.version = 'next';
|
||||
versionPart = 'next/';
|
||||
}
|
||||
|
||||
metadata.permalink = `${docsPart}${langPart}${versionPart}${
|
||||
metadata.id
|
||||
}.html`;
|
||||
|
||||
// change ids previous, next
|
||||
metadata.localized_id = metadata.id;
|
||||
metadata.id = (env.translation.enabled ? `${language}-` : '') + metadata.id;
|
||||
metadata.language = env.translation.enabled ? language : 'en';
|
||||
|
||||
const items = readSidebar(allSidebars);
|
||||
const id = metadata.localized_id;
|
||||
const item = items[id];
|
||||
if (item) {
|
||||
metadata.sidebar = item.sidebar;
|
||||
metadata.category = item.category;
|
||||
metadata.subcategory = item.subcategory;
|
||||
metadata.order = item.order;
|
||||
|
||||
if (item.next) {
|
||||
metadata.next_id = item.next;
|
||||
metadata.next =
|
||||
(env.translation.enabled ? `${language}-` : '') + item.next;
|
||||
}
|
||||
if (item.previous) {
|
||||
metadata.previous_id = item.previous;
|
||||
metadata.previous =
|
||||
(env.translation.enabled ? `${language}-` : '') + item.previous;
|
||||
}
|
||||
}
|
||||
|
||||
return {metadata, rawContent};
|
||||
}
|
||||
|
||||
// process metadata for all docs and save into core/metadata.js
|
||||
function generateMetadataDocs() {
|
||||
let order;
|
||||
try {
|
||||
order = readSidebar(allSidebars);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const enabledLanguages = env.translation
|
||||
.enabledLanguages()
|
||||
.map(language => language.tag);
|
||||
|
||||
const metadatas = {};
|
||||
const defaultMetadatas = {};
|
||||
|
||||
// metadata for english files
|
||||
const docsDir = path.join(CWD, '../', getDocsPath());
|
||||
let files = glob.sync(`${docsDir}/**`);
|
||||
files.forEach(file => {
|
||||
const extension = path.extname(file);
|
||||
|
||||
if (extension === '.md' || extension === '.markdown') {
|
||||
const res = processMetadata(file, docsDir);
|
||||
|
||||
if (!res) {
|
||||
return;
|
||||
}
|
||||
const metadata = res.metadata;
|
||||
metadatas[metadata.id] = metadata;
|
||||
|
||||
// create a default list of documents for each enabled language based on docs in English
|
||||
// these will get replaced if/when the localized file is downloaded from crowdin
|
||||
enabledLanguages
|
||||
.filter(currentLanguage => currentLanguage !== 'en')
|
||||
.forEach(currentLanguage => {
|
||||
const baseMetadata = Object.assign({}, metadata);
|
||||
baseMetadata.id = baseMetadata.id
|
||||
.toString()
|
||||
.replace(/^en-/, `${currentLanguage}-`);
|
||||
if (baseMetadata.permalink) {
|
||||
baseMetadata.permalink = baseMetadata.permalink
|
||||
.toString()
|
||||
.replace(
|
||||
new RegExp(`^${docsPart}en/`),
|
||||
`${docsPart}${currentLanguage}/`,
|
||||
);
|
||||
}
|
||||
if (baseMetadata.next) {
|
||||
baseMetadata.next = baseMetadata.next
|
||||
.toString()
|
||||
.replace(/^en-/, `${currentLanguage}-`);
|
||||
}
|
||||
if (baseMetadata.previous) {
|
||||
baseMetadata.previous = baseMetadata.previous
|
||||
.toString()
|
||||
.replace(/^en-/, `${currentLanguage}-`);
|
||||
}
|
||||
baseMetadata.language = currentLanguage;
|
||||
defaultMetadatas[baseMetadata.id] = baseMetadata;
|
||||
});
|
||||
Object.assign(metadatas, defaultMetadatas);
|
||||
}
|
||||
});
|
||||
|
||||
// metadata for non-english docs
|
||||
const translatedDir = path.join(CWD, 'translated_docs');
|
||||
files = glob.sync(`${CWD}/translated_docs/**`);
|
||||
files.forEach(file => {
|
||||
if (!utils.getLanguage(file, translatedDir)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const extension = path.extname(file);
|
||||
|
||||
if (extension === '.md' || extension === '.markdown') {
|
||||
const res = processMetadata(file, translatedDir);
|
||||
if (!res) {
|
||||
return;
|
||||
}
|
||||
const metadata = res.metadata;
|
||||
metadatas[metadata.id] = metadata;
|
||||
}
|
||||
});
|
||||
|
||||
// metadata for versioned docs
|
||||
const versionData = versionFallback.docData();
|
||||
versionData.forEach(metadata => {
|
||||
const id = metadata.localized_id;
|
||||
if (order[id]) {
|
||||
metadata.sidebar = order[id].sidebar;
|
||||
metadata.category = order[id].category;
|
||||
metadata.subcategory = order[id].subcategory;
|
||||
metadata.order = order[id].order;
|
||||
|
||||
if (order[id].next) {
|
||||
metadata.next_id = order[id].next.replace(
|
||||
`version-${metadata.version}-`,
|
||||
'',
|
||||
);
|
||||
metadata.next =
|
||||
(env.translation.enabled ? `${metadata.language}-` : '') +
|
||||
order[id].next;
|
||||
}
|
||||
if (order[id].previous) {
|
||||
metadata.previous_id = order[id].previous.replace(
|
||||
`version-${metadata.version}-`,
|
||||
'',
|
||||
);
|
||||
metadata.previous =
|
||||
(env.translation.enabled ? `${metadata.language}-` : '') +
|
||||
order[id].previous;
|
||||
}
|
||||
}
|
||||
metadatas[metadata.id] = metadata;
|
||||
});
|
||||
|
||||
// Get the titles of the previous and next ids so that we can use them in
|
||||
// navigation buttons in DocsLayout.js
|
||||
Object.keys(metadatas).forEach(metadata => {
|
||||
if (metadatas[metadata].previous) {
|
||||
if (metadatas[metadatas[metadata].previous]) {
|
||||
metadatas[metadata].previous_title =
|
||||
metadatas[metadatas[metadata].previous].title;
|
||||
} else {
|
||||
metadatas[metadata].previous_title = 'Previous';
|
||||
}
|
||||
}
|
||||
if (metadatas[metadata].next) {
|
||||
if (metadatas[metadatas[metadata].next]) {
|
||||
metadatas[metadata].next_title =
|
||||
metadatas[metadatas[metadata].next].title;
|
||||
} else {
|
||||
metadatas[metadata].next_title = 'Next';
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
fs.writeFileSync(
|
||||
path.join(__dirname, '/../core/metadata.js'),
|
||||
`${'/**\n' +
|
||||
' * @' +
|
||||
'generated\n' + // separate this out for Nuclide treating @generated as readonly
|
||||
' */\n' +
|
||||
'module.exports = '}${JSON.stringify(metadatas, null, 2)};\n`,
|
||||
);
|
||||
}
|
||||
|
||||
// process metadata for blog posts and save into core/MetadataBlog.js
|
||||
function generateMetadataBlog() {
|
||||
const metadatas = [];
|
||||
|
||||
const files = glob.sync(`${CWD}/blog/**/*.*`);
|
||||
files
|
||||
.sort()
|
||||
.reverse()
|
||||
.forEach(file => {
|
||||
const extension = path.extname(file);
|
||||
if (extension !== '.md' && extension !== '.markdown') {
|
||||
return;
|
||||
}
|
||||
const metadata = blog.getMetadata(file);
|
||||
// Extract, YYYY, MM, DD from the file name
|
||||
const filePathDateArr = path
|
||||
.basename(file)
|
||||
.toString()
|
||||
.split('-');
|
||||
metadata.date = new Date(
|
||||
`${filePathDateArr[0]}-${filePathDateArr[1]}-${
|
||||
filePathDateArr[2]
|
||||
}T06:00:00.000Z`,
|
||||
);
|
||||
// allow easier sorting of blog by providing seconds since epoch
|
||||
metadata.seconds = Math.round(metadata.date.getTime() / 1000);
|
||||
|
||||
metadatas.push(metadata);
|
||||
});
|
||||
|
||||
const sortedMetadatas = metadatas.sort(
|
||||
(a, b) => parseInt(b.seconds, 10) - parseInt(a.seconds, 10),
|
||||
);
|
||||
|
||||
fs.writeFileSync(
|
||||
path.join(__dirname, '/../core/MetadataBlog.js'),
|
||||
`${'/**\n' +
|
||||
' * @' +
|
||||
'generated\n' + // separate this out for Nuclide treating @generated as readonly
|
||||
' */\n' +
|
||||
'module.exports = '}${JSON.stringify(sortedMetadatas, null, 2)};\n`,
|
||||
);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getDocsPath,
|
||||
readSidebar,
|
||||
processMetadata,
|
||||
generateMetadataDocs,
|
||||
generateMetadataBlog,
|
||||
};
|
21
packages/docusaurus-1.x/lib/server/renderUtils.js
Normal file
@@ -0,0 +1,21 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const renderToStaticMarkup = require('react-dom/server').renderToStaticMarkup;

/**
 * Custom function that wraps renderToStaticMarkup so that we can inject
 * doctype before React renders the contents. All instance of full-page
 * rendering within Docusaurus should use this function instead.
 */
function renderToStaticMarkupWithDoctype(...args) {
  return `<!DOCTYPE html>${renderToStaticMarkup(...args)}`;
}

module.exports = {
  renderToStaticMarkupWithDoctype,
};
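A tiny sketch of what the wrapper produces:

```js
const React = require('react');
const {renderToStaticMarkupWithDoctype} = require('./renderUtils');

renderToStaticMarkupWithDoctype(React.createElement('p', null, 'Hello'));
// => '<!DOCTYPE html><p>Hello</p>'
```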
55
packages/docusaurus-1.x/lib/server/routing.js
Normal file
@@ -0,0 +1,55 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
function blog(siteConfig) {
  return new RegExp(`^${siteConfig.baseUrl}blog/.*html$`);
}

function docs(siteConfig) {
  const docsPart = `${siteConfig.docsUrl ? `${siteConfig.docsUrl}/` : ''}`;
  return new RegExp(`^${siteConfig.baseUrl}${docsPart}.*html$`);
}

function dotfiles() {
  return /(?!.*html$)^\/.*\.[^\n/]+$/;
}

function feed(siteConfig) {
  return new RegExp(`^${siteConfig.baseUrl}blog/(feed.xml|atom.xml)$`);
}

function noExtension() {
  return /\/[^.]*\/?$/;
}

function page(siteConfig) {
  const gr = regex => regex.toString().replace(/(^\/|\/$)/gm, '');

  if (siteConfig.docsUrl === '') {
    return new RegExp(
      `(?!${gr(blog(siteConfig))})^${siteConfig.baseUrl}.*.html$`,
    );
  }
  return new RegExp(
    `(?!${gr(blog(siteConfig))}|${gr(docs(siteConfig))})^${
      siteConfig.baseUrl
    }.*.html$`,
  );
}

function sitemap(siteConfig) {
  return new RegExp(`^${siteConfig.baseUrl}sitemap.xml$`);
}

module.exports = {
  blog,
  docs,
  dotfiles,
  feed,
  page,
  noExtension,
  sitemap,
};
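For example, with a hypothetical site config the matchers behave as follows:

```js
const routing = require('./routing');

const siteConfig = {baseUrl: '/', docsUrl: 'docs'};
routing.docs(siteConfig).test('/docs/en/installation.html'); // true
routing.blog(siteConfig).test('/blog/2017/12/14/introducing-docusaurus.html'); // true
routing.feed(siteConfig).test('/blog/atom.xml'); // true
routing.docs(siteConfig).test('/about/index.html'); // false
```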
383
packages/docusaurus-1.x/lib/server/server.js
Normal file
|
@@ -0,0 +1,383 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
 * LICENSE file in the root directory of this source tree.
 */

/* eslint-disable no-cond-assign */

function execute(port) {
  const extractTranslations = require('../write-translations');
  const metadataUtils = require('./metadataUtils');
  const blog = require('./blog');
  const docs = require('./docs');
  const env = require('./env.js');
  const express = require('express');
  const React = require('react');
  const request = require('request');
  const fs = require('fs-extra');
  const path = require('path');
  const {isSeparateCss} = require('./utils');
  const mkdirp = require('mkdirp');
  const glob = require('glob');
  const chalk = require('chalk');
  const translate = require('./translate');
  const {renderToStaticMarkupWithDoctype} = require('./renderUtils');
  const feed = require('./feed');
  const sitemap = require('./sitemap');
  const routing = require('./routing');
  const loadConfig = require('./config');
  const CWD = process.cwd();
  const join = path.join;
  const sep = path.sep;

  function removeModulePathFromCache(moduleName) {
    /* eslint-disable no-underscore-dangle */
    Object.keys(module.constructor._pathCache).forEach(cacheKey => {
      if (cacheKey.indexOf(moduleName) > 0) {
        delete module.constructor._pathCache[cacheKey];
      }
    });
  }

  // Remove a module and child modules from require cache, so server
  // does not have to be restarted.
  function removeModuleAndChildrenFromCache(moduleName) {
    let mod = require.resolve(moduleName);
    if (mod && (mod = require.cache[mod])) {
      mod.children.forEach(child => {
        delete require.cache[child.id];
        removeModulePathFromCache(mod.id);
      });
      delete require.cache[mod.id];
      removeModulePathFromCache(mod.id);
    }
  }

  const readMetadata = require('./readMetadata.js');
  let Metadata;
  let MetadataBlog;
  let siteConfig;

  function reloadMetadata() {
    removeModuleAndChildrenFromCache('./readMetadata.js');
    readMetadata.generateMetadataDocs();
    removeModuleAndChildrenFromCache('../core/metadata.js');
    Metadata = require('../core/metadata.js');
  }

  function reloadMetadataBlog() {
    if (fs.existsSync(join(__dirname, '..', 'core', 'MetadataBlog.js'))) {
      removeModuleAndChildrenFromCache(join('..', 'core', 'MetadataBlog.js'));
      fs.removeSync(join(__dirname, '..', 'core', 'MetadataBlog.js'));
    }
    readMetadata.generateMetadataBlog();
    MetadataBlog = require(join('..', 'core', 'MetadataBlog.js'));
  }

  function reloadSiteConfig() {
    const siteConfigPath = join(CWD, 'siteConfig.js');
    removeModuleAndChildrenFromCache(siteConfigPath);
    siteConfig = loadConfig(siteConfigPath);
  }

  function requestFile(url, res, notFoundCallback) {
    request.get(url, (error, response, body) => {
      if (!error) {
        if (response) {
          if (response.statusCode === 404 && notFoundCallback) {
            notFoundCallback();
          } else {
            res.status(response.statusCode).send(body);
          }
        } else {
          console.error('No response');
        }
      } else {
        console.error('Request failed:', error);
      }
    });
  }

  reloadMetadata();
  reloadMetadataBlog();
  extractTranslations();
  reloadSiteConfig();

  const app = express();

  app.get(routing.docs(siteConfig), (req, res, next) => {
    const url = decodeURI(req.path.toString().replace(siteConfig.baseUrl, ''));
    const metadata =
      Metadata[
        Object.keys(Metadata).find(id => Metadata[id].permalink === url)
      ];

    const file = docs.getFile(metadata);
    if (!file) {
      next();
      return;
    }
    const rawContent = metadataUtils.extractMetadata(file).rawContent;
    removeModuleAndChildrenFromCache('../core/DocsLayout.js');
    const mdToHtml = metadataUtils.mdToHtml(Metadata, siteConfig);
    res.send(docs.getMarkup(rawContent, mdToHtml, metadata));
  });

  app.get(routing.sitemap(siteConfig), (req, res) => {
    sitemap((err, xml) => {
      if (err) {
        res.status(500).send('Sitemap error');
      } else {
        res.set('Content-Type', 'application/xml');
        res.send(xml);
      }
    });
  });

  app.get(routing.feed(siteConfig), (req, res, next) => {
    res.set('Content-Type', 'application/rss+xml');
    const file = req.path
      .toString()
      .split('blog/')[1]
      .toLowerCase();
    if (file === 'atom.xml') {
      res.send(feed('atom'));
    } else if (file === 'feed.xml') {
      res.send(feed('rss'));
    }
    next();
  });

  app.get(routing.blog(siteConfig), (req, res, next) => {
    // Regenerate the blog metadata in case it has changed. Consider improving
    // this to regenerate on file save rather than on page request.
    reloadMetadataBlog();
    removeModuleAndChildrenFromCache(join('..', 'core', 'BlogPageLayout.js'));
    const blogPages = blog.getPagesMarkup(MetadataBlog.length, siteConfig);
    const urlPath = req.path.toString().split('blog/')[1];

    if (urlPath === 'index.html') {
      res.send(blogPages['/index.html']);
    } else if (urlPath.endsWith('/index.html') && blogPages[urlPath]) {
      res.send(blogPages[urlPath]);
    } else if (urlPath.match(/page([0-9]+)/)) {
      res.send(blogPages[`${urlPath.replace(/\/$/, '')}/index.html`]);
    } else {
      const file = join(CWD, 'blog', blog.urlToSource(urlPath));
      removeModuleAndChildrenFromCache(join('..', 'core', 'BlogPostLayout.js'));
      const blogPost = blog.getPostMarkup(file, siteConfig);
      if (!blogPost) {
        next();
        return;
      }
      res.send(blogPost);
    }
  });

  app.get(routing.page(siteConfig), (req, res, next) => {
    // Look for user-provided HTML file first.
    let htmlFile = req.path.toString().replace(siteConfig.baseUrl, '');
    htmlFile = join(CWD, 'pages', htmlFile);
    if (
      fs.existsSync(htmlFile) ||
      fs.existsSync(
        (htmlFile = htmlFile.replace(
          path.basename(htmlFile),
          join('en', path.basename(htmlFile)),
        )),
      )
    ) {
      if (siteConfig.wrapPagesHTML) {
        removeModuleAndChildrenFromCache(join('..', 'core', 'Site.js'));
        const Site = require(join('..', 'core', 'Site.js'));
        const str = renderToStaticMarkupWithDoctype(
          <Site
            language="en"
            config={siteConfig}
            metadata={{id: path.basename(htmlFile, '.html')}}>
            <div
              dangerouslySetInnerHTML={{
                __html: fs.readFileSync(htmlFile, {encoding: 'utf8'}),
              }}
            />
          </Site>,
        );

        res.send(str);
      } else {
        res.send(fs.readFileSync(htmlFile, {encoding: 'utf8'}));
      }
      next();
      return;
    }

    // look for user provided react file either in specified path or in path for english files
    let file = req.path.toString().replace(/\.html$/, '.js');
    file = file.replace(siteConfig.baseUrl, '');
    let userFile = join(CWD, 'pages', file);

    let language = env.translation.enabled ? 'en' : '';
    const regexLang = /(.*)\/.*\.html$/;
    const match = regexLang.exec(req.path);
    const parts = match[1].split('/');
    const enabledLangTags = env.translation
      .enabledLanguages()
      .map(lang => lang.tag);

    for (let i = 0; i < parts.length; i++) {
      if (enabledLangTags.indexOf(parts[i]) !== -1) {
        language = parts[i];
      }
    }

    let englishFile = join(CWD, 'pages', file);
    if (language && language !== 'en') {
      englishFile = englishFile.replace(sep + language + sep, `${sep}en${sep}`);
    }

    // check for: a file for the page, an english file for page with unspecified language, or an
    // english file for the page
    if (
      fs.existsSync(userFile) ||
      fs.existsSync(
        (userFile = userFile.replace(
          path.basename(userFile),
          `en${sep}${path.basename(userFile)}`,
        )),
      ) ||
      fs.existsSync((userFile = englishFile))
    ) {
      // copy into docusaurus so require paths work
      const userFileParts = userFile.split(`pages${sep}`);
      let tempFile = join(__dirname, '..', 'pages', userFileParts[1]);
      tempFile = tempFile.replace(
        path.basename(file),
        `temp${path.basename(file)}`,
      );
      mkdirp.sync(path.dirname(tempFile));
      fs.copySync(userFile, tempFile);

      // render into a string
      removeModuleAndChildrenFromCache(tempFile);
      const ReactComp = require(tempFile);
      removeModuleAndChildrenFromCache(join('..', 'core', 'Site.js'));
      const Site = require(join('..', 'core', 'Site.js'));
      translate.setLanguage(language);
      const str = renderToStaticMarkupWithDoctype(
        <Site
          language={language}
          config={siteConfig}
          title={ReactComp.title}
          description={ReactComp.description}
          metadata={{id: path.basename(userFile, '.js')}}>
          <ReactComp config={siteConfig} language={language} />
        </Site>,
      );

      fs.removeSync(tempFile);

      res.send(str);
    } else {
      next();
    }
  });

  app.get(`${siteConfig.baseUrl}css/main.css`, (req, res) => {
    const mainCssPath = join(
      __dirname,
      '..',
      'static',
      req.path.toString().replace(siteConfig.baseUrl, '/'),
    );
    let cssContent = fs.readFileSync(mainCssPath, {encoding: 'utf8'});

    const files = glob.sync(join(CWD, 'static', '**', '*.css'));

    files.forEach(file => {
      if (isSeparateCss(file, siteConfig.separateCss)) {
        return;
      }
      cssContent = `${cssContent}\n${fs.readFileSync(file, {
        encoding: 'utf8',
      })}`;
    });

    if (
      !siteConfig.colors ||
      !siteConfig.colors.primaryColor ||
      !siteConfig.colors.secondaryColor
    ) {
      console.error(
        `${chalk.yellow(
          'Missing color configuration.',
        )} Make sure siteConfig.colors includes primaryColor and secondaryColor fields.`,
      );
    }

    Object.keys(siteConfig.colors).forEach(key => {
      const color = siteConfig.colors[key];
      cssContent = cssContent.replace(new RegExp(`\\$${key}`, 'g'), color);
    });

    if (siteConfig.fonts) {
      Object.keys(siteConfig.fonts).forEach(key => {
        const fontString = siteConfig.fonts[key]
          .map(font => `"${font}"`)
          .join(', ');
        cssContent = cssContent.replace(
          new RegExp(`\\$${key}`, 'g'),
          fontString,
        );
      });
    }

    res.header('Content-Type', 'text/css');
    res.send(cssContent);
  });

  // serve static assets from these locations
  app.use(
    `${siteConfig.baseUrl}${
      siteConfig.docsUrl ? `${siteConfig.docsUrl}/` : ''
    }assets`,
    express.static(join(CWD, '..', readMetadata.getDocsPath(), 'assets')),
  );
  app.use(
    `${siteConfig.baseUrl}blog/assets`,
    express.static(join(CWD, 'blog', 'assets')),
  );
  app.use(siteConfig.baseUrl, express.static(join(CWD, 'static')));
  app.use(siteConfig.baseUrl, express.static(join(__dirname, '..', 'static')));

  // "redirect" requests to pages ending with "/" or no extension so that,
  // for example, request to "blog" returns "blog/index.html" or "blog.html"
  app.get(routing.noExtension(), (req, res, next) => {
    const slash = req.path.toString().endsWith('/') ? '' : '/';
    const requestUrl = `http://localhost:${port}${req.path}`;
    requestFile(`${requestUrl + slash}index.html`, res, () => {
      requestFile(
        slash === '/'
          ? `${requestUrl}.html`
          : requestUrl.replace(/\/$/, '.html'),
        res,
        next,
      );
    });
  });

  // handle special cleanUrl case like '/blog/1.2.3' & '/blog.robots.hai'
  // where we should try to serve '/blog/1.2.3.html' & '/blog.robots.hai.html'
  app.get(routing.dotfiles(), (req, res, next) => {
    if (!siteConfig.cleanUrl) {
      next();
      return;
    }
    requestFile(`http://localhost:${port}${req.path}.html`, res, next);
  });

  app.listen(port);
}

module.exports = execute;
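Editor's note (not part of the commit): `execute(port)` wires up the whole Express dev server and is exported as the module's only value, so a caller just passes a port. A minimal usage sketch, assuming a `siteConfig.js` exists in the working directory:

```js
// Hypothetical standalone usage of the dev server entry point above.
const server = require('./server.js');

// Serve docs, blog, pages and generated CSS on port 3000.
server(3000);
```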
100
packages/docusaurus-1.x/lib/server/sitemap.js
Normal file
@@ -0,0 +1,100 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const fs = require('fs-extra');

const glob = require('glob');

const CWD = process.cwd();

const sitemap = require('sitemap');
const utils = require('../core/utils');

const loadConfig = require('./config');

const siteConfig = loadConfig(`${CWD}/siteConfig.js`);

const readMetadata = require('./readMetadata.js');

readMetadata.generateMetadataDocs();
const Metadata = require('../core/metadata.js');

readMetadata.generateMetadataBlog();
const MetadataBlog = require('../core/MetadataBlog.js');

module.exports = function(callback) {
  console.log('sitemap.js triggered...');

  const files = glob.sync(`${CWD}/pages/en/**/*.js`);

  // English-only is the default.
  let enabledLanguages = [
    {
      enabled: true,
      name: 'English',
      tag: 'en',
    },
  ];

  // If we have a languages.js file, get all the enabled languages in there
  if (fs.existsSync(`${CWD}/languages.js`)) {
    const languages = require(`${CWD}/languages.js`);
    enabledLanguages = languages.filter(lang => lang.enabled);
  }

  // Create a url mapping to all the enabled languages files
  const urls = files.map(file => {
    let url = file.split('/pages/en')[1];
    url = siteConfig.cleanUrl
      ? url.replace(/\.js$/, '')
      : url.replace(/\.js$/, '.html');
    const links = enabledLanguages.map(lang => {
      const langUrl = lang.tag + url;
      return {lang: lang.tag, url: langUrl};
    });
    return {url, changefreq: 'weekly', priority: 0.5, links};
  });

  MetadataBlog.forEach(blog => {
    urls.push({
      url: `/blog/${utils.getPath(blog.path, siteConfig.cleanUrl)}`,
      changefreq: 'weekly',
      priority: 0.3,
    });
  });

  Object.keys(Metadata)
    .filter(key => Metadata[key].language === 'en')
    .forEach(key => {
      const doc = Metadata[key];
      const docUrl = utils.getPath(doc.permalink, siteConfig.cleanUrl);
      const docsPart = `${siteConfig.docsUrl ? `${siteConfig.docsUrl}/` : ''}`;
      const links = enabledLanguages.map(lang => {
        const langUrl = docUrl.replace(
          new RegExp(`^${docsPart}en/`),
          `${docsPart}${lang.tag}/`,
        );
        return {lang: lang.tag, url: langUrl};
      });
      urls.push({
        url: docUrl,
        changefreq: 'hourly',
        priority: 1.0,
        links,
      });
    });

  const sm = sitemap.createSitemap({
    hostname: siteConfig.url + siteConfig.baseUrl,
    cacheTime: 600 * 1000, // 600 sec - cache purge period
    urls,
  });

  sm.toXML((err, xml) => {
    callback(err, xml);
  });
};
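Editor's note (not part of the commit): the module exports a single function that takes a Node-style callback, which server.js above uses to answer sitemap requests. A hedged sketch of writing the result to disk instead:

```js
// Hypothetical: generate the sitemap once and persist it to a file.
const fs = require('fs-extra');
const sitemap = require('./sitemap');

sitemap((err, xml) => {
  if (err) {
    console.error('Sitemap generation failed:', err);
    return;
  }
  fs.writeFileSync('sitemap.xml', xml);
});
```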
51
packages/docusaurus-1.x/lib/server/start.js
Normal file
@@ -0,0 +1,51 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const program = require('commander');
const openBrowser = require('react-dev-utils/openBrowser');
const portFinder = require('portfinder');
const liveReloadServer = require('./liveReloadServer.js');
const server = require('./server.js');

const CWD = process.cwd();

function startLiveReloadServer() {
  const promise = portFinder.getPortPromise({port: 35729}).then(port => {
    liveReloadServer.start(port);
  });
  return promise;
}

function startServer() {
  const initialServerPort =
    parseInt(program.port, 10) || process.env.PORT || 3000;
  const promise = portFinder
    .getPortPromise({port: initialServerPort})
    .then(port => {
      server(port);
      const {baseUrl} = require(`${CWD}/siteConfig.js`);
      const serverAddress = `http://localhost:${port}${baseUrl}`;
      console.log('Docusaurus server started on port %d', port);
      openBrowser(serverAddress);
    });
  return promise;
}

function startDocusaurus() {
  if (program.watch) {
    return startLiveReloadServer()
      .catch(ex => console.warn(`Failed to start live reload server: ${ex}`))
      .then(() => startServer());
  }
  return startServer();
}

module.exports = {
  startDocusaurus,
  startServer,
  startLiveReloadServer,
};
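Editor's note (not part of the commit): `startDocusaurus` already chains the live-reload server and the dev server and returns a promise, so a CLI wrapper mostly needs to call it and surface failures. A sketch, assuming commander has parsed `--port`/`--watch` elsewhere:

```js
// Hypothetical CLI wrapper around the exports above.
const {startDocusaurus} = require('./start.js');

startDocusaurus().catch(err => {
  console.error(err);
  process.exit(1);
});
```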
47
packages/docusaurus-1.x/lib/server/translate-plugin.js
Normal file
@@ -0,0 +1,47 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

/* replaces translate tags with calls to translate function */

module.exports = function translatePlugin(babel) {
  const {types: t} = babel;

  return {
    visitor: {
      JSXElement(path) {
        if (path.node.openingElement.name.name !== 'translate') {
          return;
        }
        /* assume translate element only has one child which is the text */
        const text = path.node.children[0].value.trim().replace(/\s+/g, ' ');
        let description = 'no description given';
        const attributes = path.node.openingElement.attributes;
        for (let i = 0; i < attributes.length; i++) {
          if (attributes[i].name.name === 'desc') {
            description = attributes[i].value.value;
          }
        }
        /* use an expression container if inside a JSXElement */
        if (path.findParent(() => true).node.type === 'JSXElement') {
          path.replaceWith(
            t.jSXExpressionContainer(
              t.callExpression(t.identifier('translate'), [
                t.stringLiteral(`${text}|${description}`),
              ]),
            ),
          );
        } else {
          path.replaceWith(
            t.callExpression(t.identifier('translate'), [
              t.stringLiteral(`${text}|${description}`),
            ]),
          );
        }
      },
    },
  };
};
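Editor's note (not part of the commit): this Babel plugin rewrites `<translate desc="...">text</translate>` into a `translate('text|desc')` call, wrapped in a JSX expression container when the element sits inside other JSX. A hedged sketch of exercising it directly, assuming @babel/core with its JSX parser plugin is available (the real build wires the plugin into its own Babel config instead):

```js
// Hypothetical demonstration of the transform shown above.
const babel = require('@babel/core');
const translatePlugin = require('./translate-plugin');

const source =
  '<p><translate desc="greeting on landing page">Hello, world</translate></p>;';
const {code} = babel.transformSync(source, {
  plugins: [translatePlugin],
  parserOpts: {plugins: ['jsx']},
});

console.log(code);
// Roughly: <p>{translate("Hello, world|greeting on landing page")}</p>;
```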
65
packages/docusaurus-1.x/lib/server/translate.js
Normal file
@@ -0,0 +1,65 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const translation = require('./translation.js');

let language = 'en';

/* handle escaped characters that get converted into json strings */
function parseEscapeSequences(str) {
  return str
    .replace(new RegExp('\\\\n', 'g'), '\n')
    .replace(new RegExp('\\\\b', 'g'), '\b')
    .replace(new RegExp('\\\\f', 'g'), '\f')
    .replace(new RegExp('\\\\r', 'g'), '\r')
    .replace(new RegExp('\\\\t', 'g'), '\t')
    .replace(new RegExp("\\\\'", 'g'), "'")
    .replace(new RegExp('\\\\"', 'g'), '"')
    .replace(new RegExp('\\\\', 'g'), '\\');
}

function setLanguage(lang) {
  language = lang;
}

function doesTranslationExist(str, lang) {
  return (
    translation[lang] &&
    translation[lang]['pages-strings'] &&
    translation[lang]['pages-strings'][str]
  );
}

function translate(str) {
  if (!language || language === '') {
    // Check English, just in case; otherwise, just return the raw string back
    if (doesTranslationExist(str, 'en')) {
      return parseEscapeSequences(translation.en['pages-strings'][str]);
    }
    return str;
  }

  if (!doesTranslationExist(str, language)) {
    // if a translated string doesn't exist, but english does then fallback
    if (doesTranslationExist(str, 'en')) {
      console.error(
        `Could not find a string translation in '${language}' for string '${str}'. Using English version instead.`,
      );

      return parseEscapeSequences(translation.en['pages-strings'][str]);
    }
    throw new Error(
      `Text that you've identified for translation ('${str}') hasn't been added to the global list in 'en.json'. To solve this problem run 'yarn write-translations'.`,
    );
  }
  return parseEscapeSequences(translation[language]['pages-strings'][str]);
}

module.exports = {
  setLanguage,
  translate,
};
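Editor's note (not part of the commit): strings are keyed by their `text|description` pair under `pages-strings` in each `i18n/<lang>.json`, so a lookup works roughly as follows. The key below is a made-up example:

```js
// Hypothetical lookup; the key must already exist in i18n/en.json
// (generated by `yarn write-translations`), otherwise translate() throws.
const {setLanguage, translate} = require('./translate');

setLanguage('fr');
console.log(
  translate('Help Translate|recruit community translators for your project'),
);
// Logs an error and falls back to the English string when no
// French translation is available.
```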
44
packages/docusaurus-1.x/lib/server/translation.js
Normal file
@@ -0,0 +1,44 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

// translation object contains all translations for each string in i18n/en.json

const CWD = process.cwd();
const fs = require('fs');
const glob = require('glob');
const path = require('path');

let languages;
if (fs.existsSync(`${CWD}/languages.js`)) {
  languages = require(`${CWD}/languages.js`);
} else {
  languages = [
    {
      enabled: true,
      name: 'English',
      tag: 'en',
    },
  ];
}

const enabledLanguages = languages.filter(lang => lang.enabled);

const translation = {languages: enabledLanguages};

const files = glob.sync(`${CWD}/i18n/**`);
const langRegex = /\/i18n\/(.*)\.json$/;

files.forEach(file => {
  const extension = path.extname(file);
  if (extension === '.json') {
    const match = langRegex.exec(file);
    const language = match[1];
    translation[language] = require(file);
  }
});

module.exports = translation;
88
packages/docusaurus-1.x/lib/server/utils.js
Normal file
@@ -0,0 +1,88 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
const cssnano = require('cssnano');
const autoprefixer = require('autoprefixer');
const postcss = require('postcss');
const path = require('path');
const escapeStringRegexp = require('escape-string-regexp');

function getSubDir(file, refDir) {
  const subDir = path.dirname(path.relative(refDir, file)).replace(/\\/g, '/');
  return subDir !== '.' && !subDir.includes('..') ? subDir : null;
}

function getLanguage(file, refDir) {
  const separator = escapeStringRegexp(path.sep);
  const baseDir = escapeStringRegexp(path.basename(refDir));
  const regexSubFolder = new RegExp(
    `${baseDir}${separator}(.*?)${separator}.*`,
  );
  const match = regexSubFolder.exec(file);

  // Avoid misinterpreting subdirectory as language
  const env = require('./env.js');
  if (match && env.translation.enabled) {
    const enabledLanguages = env.translation
      .enabledLanguages()
      .map(language => language.tag);
    if (enabledLanguages.indexOf(match[1]) !== -1) {
      return match[1];
    }
  }
  return null;
}

function isSeparateCss(file, separateDirs) {
  if (!separateDirs) {
    return false;
  }
  for (let i = 0; i < separateDirs.length; i++) {
    if (file.includes(separateDirs[i])) {
      return true;
    }
  }
  return false;
}

function minifyCss(cssContent) {
  return cssnano
    .process(cssContent, {
      preset: 'default',
      zindex: false,
    })
    .then(result => result.css);
}

function autoPrefixCss(cssContent) {
  return postcss([autoprefixer])
    .process(cssContent, {
      from: undefined,
    })
    .then(result => result.css);
}

function replaceAssetsLink(oldContent, location) {
  let fencedBlock = false;
  const lines = oldContent.split('\n').map(line => {
    if (line.trim().startsWith('```')) {
      fencedBlock = !fencedBlock;
    }
    return fencedBlock
      ? line
      : line.replace(/\]\(assets\//g, `](${location}/assets/`);
  });
  return lines.join('\n');
}

module.exports = {
  getSubDir,
  getLanguage,
  isSeparateCss,
  minifyCss,
  autoPrefixCss,
  replaceAssetsLink,
};
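Editor's note (not part of the commit): to make the fenced-block guard in `replaceAssetsLink` concrete, a small illustrative run with hypothetical input. Links to `assets/` are rebased onto the given location, but anything inside a code fence is left untouched:

```js
// Hypothetical input showing that only links outside fenced code blocks are rewritten.
const {replaceAssetsLink} = require('./utils.js');

const fence = '`'.repeat(3); // avoid a literal fence inside this example
const markdown = [
  '![logo](assets/logo.png)',
  `${fence}md`,
  '![logo](assets/logo.png)',
  fence,
].join('\n');

console.log(replaceAssetsLink(markdown, '/docs/en/next'));
// The first image becomes ![logo](/docs/en/next/assets/logo.png);
// the copy inside the fence keeps its original path.
```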
334
packages/docusaurus-1.x/lib/server/versionFallback.js
Normal file
@@ -0,0 +1,334 @@
/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

const CWD = process.cwd();
const glob = require('glob');
const fs = require('fs');
const path = require('path');

const metadataUtils = require('./metadataUtils');

const env = require('./env.js');
const utils = require('./utils.js');
const loadConfig = require('./config');

const siteConfig = loadConfig(`${CWD}/siteConfig.js`);

const ENABLE_TRANSLATION = fs.existsSync(`${CWD}/languages.js`);

let versions;
if (fs.existsSync(`${CWD}/versions.json`)) {
  versions = require(`${CWD}/versions.json`);
} else {
  versions = [];
}

let languages;
if (fs.existsSync(`${CWD}/languages.js`)) {
  languages = require(`${CWD}/languages.js`);
} else {
  languages = [
    {
      enabled: true,
      name: 'English',
      tag: 'en',
    },
  ];
}

const versionFolder = `${CWD}/versioned_docs/`;

// available stores doc ids of documents that are available for
// each version
const available = {};
// versionFiles is used to keep track of what file to use with a
// given version/id of a document
const versionFiles = {};
const files = glob.sync(`${versionFolder}**`);
files.forEach(file => {
  const ext = path.extname(file);
  if (ext !== '.md' && ext !== '.markdown') {
    return;
  }
  const res = metadataUtils.extractMetadata(fs.readFileSync(file, 'utf8'));
  const metadata = res.metadata;

  if (!metadata.original_id) {
    console.error(
      `No 'original_id' field found in ${file}. Perhaps you forgot to add it when importing prior versions of your docs?`,
    );
    throw new Error(
      `No 'original_id' field found in ${file}. Perhaps you forgot to add it when importing prior versions of your docs?`,
    );
  }
  if (!metadata.id) {
    console.error(`No 'id' field found in ${file}.`);
    throw new Error(`No 'id' field found in ${file}.`);
  } else if (metadata.id.indexOf('version-') === -1) {
    console.error(
      `The 'id' field in ${file} is missing the expected 'version-XX-' prefix. Perhaps you forgot to add it when importing prior versions of your docs?`,
    );
    throw new Error(
      `The 'id' field in ${file} is missing the expected 'version-XX-' prefix. Perhaps you forgot to add it when importing prior versions of your docs?`,
    );
  }

  // The version will be between "version-" and "-<metadata.original_id>"
  // e.g. version-1.0.0-beta.2-doc1 => 1.0.0-beta.2
  // e.g. version-1.0.0-doc2 => 1.0.0
  // e.g. version-1.0.0-getting-started => 1.0.0
  const version = metadata.id.substring(
    metadata.id.indexOf('version-') + 8, // version- is 8 characters
    metadata.id.lastIndexOf(`-${metadata.original_id}`),
  );

  // the original_id should be namespaced according to subdir to allow duplicate id in different subfolder
  const subDir = utils.getSubDir(
    file,
    path.join(versionFolder, `version-${version}`),
  );
  if (subDir) {
    metadata.original_id = `${subDir}/${metadata.original_id}`;
  }

  if (!(metadata.original_id in available)) {
    available[metadata.original_id] = new Set();
  }
  available[metadata.original_id].add(version);

  if (!(version in versionFiles)) {
    versionFiles[version] = {};
  }
  versionFiles[version][metadata.original_id] = file;
});

// returns the version to use for a document based on its id and
// what the requested version is
function docVersion(id, reqVersion) {
  if (!available[id]) {
    return null;
  }
  // iterate through versions until a version less than or equal to the requested
  // is found, then check if that version has an available file to use
  let requestedFound = false;
  for (let i = 0; i < versions.length; i++) {
    if (versions[i] === reqVersion) {
      requestedFound = true;
    }
    if (requestedFound && available[id].has(versions[i])) {
      return versions[i];
    }
  }
  return null;
}

// returns whether a given file has content that differ from the
// document with the given id
function diffLatestDoc(file, id) {
  if (versions.length === 0) {
    return true;
  }

  const latest = versions[0];

  let version;
  try {
    version = docVersion(id, latest);
  } catch (e) {
    console.error(e);
    process.exit(1);
  }
  if (!version) {
    return true;
  }
  const latestFile = versionFiles[version][id];

  if (!latestFile || !fs.existsSync(latestFile)) {
    return true;
  }

  return (
    metadataUtils
      .extractMetadata(fs.readFileSync(latestFile, 'utf8'))
      .rawContent.trim() !==
    metadataUtils
      .extractMetadata(fs.readFileSync(file, 'utf8'))
      .rawContent.trim()
  );
}

// return metadata for a versioned file given the file, its version (requested),
// the version of the file to be used, and its language
function processVersionMetadata(file, version, useVersion, language) {
  const metadata = metadataUtils.extractMetadata(fs.readFileSync(file, 'utf8'))
    .metadata;

  // Add subdirectory information to versioned_doc metadata
  // Example: `versioned_docs/version-1.1.6/projectA/readme.md` file with id `version-1.1.6-readme`
  // and original_id `readme` will have metadata id of `version-1.1.6-projectA/readme` and original_id `projectA/readme`
  const subDir = utils.getSubDir(
    file,
    path.join(CWD, 'versioned_docs', `version-${useVersion}`),
  );
  if (subDir) {
    metadata.original_id = `${subDir}/${metadata.original_id}`;
    metadata.id = metadata.id.replace(
      `version-${useVersion}-`,
      `version-${useVersion}-${subDir}/`,
    );
  }

  metadata.source = subDir
    ? `version-${useVersion}/${subDir}/${path.basename(file)}`
    : `version-${useVersion}/${path.basename(file)}`;

  const latestVersion = versions[0];

  const docsPart = `${siteConfig.docsUrl ? `${siteConfig.docsUrl}/` : ''}`;
  const versionPart = `${version !== latestVersion ? `${version}/` : ''}`;
  if (!ENABLE_TRANSLATION && !siteConfig.useEnglishUrl) {
    metadata.permalink = `${docsPart}${versionPart}${
      metadata.original_id
    }.html`;
  } else {
    metadata.permalink = `${docsPart}${language}/${versionPart}${
      metadata.original_id
    }.html`;
  }
  metadata.id = metadata.id.replace(
    `version-${useVersion}-`,
    `version-${version}-`,
  );
  metadata.localized_id = metadata.id;
  metadata.id = (env.translation.enabled ? `${language}-` : '') + metadata.id;
  metadata.language = language;
  metadata.version = version;

  return metadata;
}

// return all metadata of versioned documents
function docData() {
  const allIds = new Set();
  Object.keys(versionFiles).forEach(version => {
    Object.keys(versionFiles[version]).forEach(id => {
      allIds.add(id);
    });
  });

  const metadatas = [];

  languages
    .filter(language => language.enabled)
    .forEach(language => {
      versions.forEach(version => {
        allIds.forEach(id => {
          let useVersion;
          try {
            useVersion = docVersion(id, version);
          } catch (e) {
            console.log(e);
            process.exit(1);
          }
          if (!useVersion) {
            return;
          }
          const file = versionFiles[useVersion][id];

          metadatas.push(
            processVersionMetadata(file, version, useVersion, language.tag),
          );
        });
      });
    });

  return metadatas;
}

// return the version of the sidebar to use given a requested version
function sidebarVersion(reqVersion) {
  // iterate through versions until a version less than or equal to the requested
  // is found, then check if that version has an available file to use
  let requestedFound = false;
  for (let i = 0; i < versions.length; i++) {
    if (versions[i] === reqVersion) {
      requestedFound = true;
    }
    if (
      requestedFound &&
      fs.existsSync(
        `${CWD}/versioned_sidebars/version-${versions[i]}-sidebars.json`,
      )
    ) {
      return versions[i];
    }
  }
  throw new Error(
    `No sidebar file available to use for version ${reqVersion}. Verify that 'version-${reqVersion}-sidebars.json' exists.`,
  );
}

// return whether or not the current sidebars.json file differs from the
// latest versioned one
function diffLatestSidebar() {
  if (versions.length === 0) {
    return true;
  }
  const latest = versions[0];

  const version = sidebarVersion(latest);
  const latestSidebar = `${CWD}/versioned_sidebars/version-${version}-sidebars.json`;
  if (!fs.existsSync(latestSidebar)) {
    return true;
  }
  const currentSidebar = `${CWD}/sidebars.json`;
  // if no current sidebar file, return false so no sidebar file gets copied
  if (!fs.existsSync(currentSidebar)) {
    return false;
  }

  // compare for equality between latest version sidebar with version prefixes
  // stripped and current sidebar
  return (
    JSON.stringify(JSON.parse(fs.readFileSync(latestSidebar, 'utf8'))).replace(
      new RegExp(`version-${version}-`, 'g'),
      '',
    ) !== JSON.stringify(JSON.parse(fs.readFileSync(currentSidebar, 'utf8')))
  );
}

// return all versioned sidebar data
function sidebarData() {
  const allSidebars = {};

  for (let i = 0; i < versions.length; i++) {
    const version = sidebarVersion(versions[i]);
    const sidebar = JSON.parse(
      fs
        .readFileSync(
          `${CWD}/versioned_sidebars/version-${version}-sidebars.json`,
          'utf8',
        )
        .replace(
          new RegExp(`version-${version}-`, 'g'),
          `version-${versions[i]}-`,
        ),
    );
    Object.assign(allSidebars, sidebar);
  }
  return allSidebars;
}

module.exports = {
  docVersion,
  diffLatestDoc,
  processVersionMetadata,
  docData,
  sidebarVersion,
  diffLatestSidebar,
  sidebarData,
};
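Editor's note (not part of the commit): `docVersion` and `sidebarVersion` both rely on the same fallback walk. versions.json is ordered newest first; the loop starts matching at the requested version and returns the first version at or below it that actually has a file. A self-contained sketch of that loop with made-up data (the module itself builds its state from versioned_docs on disk):

```js
// Standalone illustration of the version-fallback loop; the data below is hypothetical.
function pickVersion(orderedVersions, versionsWithFile, requested) {
  let requestedFound = false;
  for (const candidate of orderedVersions) {
    if (candidate === requested) {
      requestedFound = true;
    }
    if (requestedFound && versionsWithFile.has(candidate)) {
      return candidate;
    }
  }
  return null;
}

// doc1 was last copied into versioned_docs at 1.0.0, so requests for
// 1.0.2 and 1.0.1 both fall back to the 1.0.0 file.
const versions = ['1.0.2', '1.0.1', '1.0.0'];
console.log(pickVersion(versions, new Set(['1.0.0']), '1.0.2')); // '1.0.0'
console.log(pickVersion(versions, new Set(['1.0.0']), '1.0.0')); // '1.0.0'
```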