Mirror of https://github.com/facebook/docusaurus.git
* Duplicate code in readMetadata.js & versionFallback.js #725
* Putting back package-lock.json
* Rename lib/server/utilsMetadata.js -> lib/server/metadataUtils.js
* Update splitHeader + extractMetadata
* Update prettier
Parent: de8da1ca54
Commit: a77ae958db
7 changed files with 100 additions and 101 deletions
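The change extracts the duplicated front-matter parser into a shared lib/server/metadataUtils.js module; call sites that previously went through readMetadata.js (or carried a private copy, as versionFallback.js did) now require the shared module. A minimal before/after sketch of a typical call site (the surrounding file is left unspecified):

// Before: extractMetadata was reached through readMetadata.js
// const readMetadata = require('./server/readMetadata.js');
// const res = readMetadata.extractMetadata(fs.readFileSync(file, 'utf8'));

// After: the shared helper module is required directly
const fs = require('fs');
const metadataUtils = require('./server/metadataUtils.js');

const res = metadataUtils.extractMetadata(fs.readFileSync(file, 'utf8'));
const metadata = res.metadata; // parsed front-matter fields
const rawContent = res.rawContent; // markdown body without the header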
@@ -12,6 +12,7 @@ const fs = require('fs');
 const glob = require('glob');
 const path = require('path');
 const readMetadata = require('./server/readMetadata.js');
+const metadataUtils = require('./server/metadataUtils.js');
 
 const CWD = process.cwd();
 
@@ -99,7 +100,7 @@ if (fs.existsSync(CWD + '/versioned_docs/version-' + currentVersion)) {
   if (extension !== '.md' && extension !== '.markdown') {
     return;
   }
-  const res = readMetadata.extractMetadata(fs.readFileSync(file, 'utf8'));
+  const res = metadataUtils.extractMetadata(fs.readFileSync(file, 'utf8'));
   const metadata = res.metadata;
   const rawContent = res.rawContent;
   if (!metadata.id) {
@@ -8,6 +8,8 @@
 async function execute() {
   const extractTranslations = require('../write-translations.js');
 
+  const metadataUtils = require('./metadataUtils');
+
   const CWD = process.cwd();
   const cssnano = require('cssnano');
   const fs = require('fs-extra');
@@ -147,8 +149,9 @@ async function execute() {
     return;
   }
 
-  let rawContent = readMetadata.extractMetadata(fs.readFileSync(file, 'utf8'))
-    .rawContent;
+  let rawContent = metadataUtils.extractMetadata(
+    fs.readFileSync(file, 'utf8')
+  ).rawContent;
 
   const language = metadata.language;
 
@@ -261,7 +264,7 @@ async function execute() {
       .replace('-', '/')
       .replace('-', '/')
      .replace(/\.md$/, '.html');
-    const result = readMetadata.extractMetadata(
+    const result = metadataUtils.extractMetadata(
       fs.readFileSync(normalizedFile, {encoding: 'utf8'})
     );
     const rawContent = result.rawContent;
lib/server/metadataUtils.js (new file, 66 lines)
@@ -0,0 +1,66 @@
+/**
+ * Copyright (c) 2017-present, Facebook, Inc.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+// split markdown header
+function splitHeader(content) {
+  // New line characters need to handle all operating systems.
+  const lines = content.split(/\r?\n/);
+  if (lines[0] !== '---') {
+    return {};
+  }
+  let i = 1;
+  for (; i < lines.length - 1; ++i) {
+    if (lines[i] === '---') {
+      break;
+    }
+  }
+  return {
+    header: lines.slice(1, i + 1).join('\n'),
+    content: lines.slice(i + 1).join('\n'),
+  };
+}
+
+// Extract markdown metadata header
+function extractMetadata(content) {
+  const metadata = {};
+  const both = splitHeader(content);
+
+  // if no content returned, then that means there was no header, and both.header is the content
+  if (!both.content) {
+    if (!both.header) {
+      // if no both returned, then that means there was no header and no content => we return the current content of the file
+      return {metadata, rawContent: content};
+    }
+    return {metadata, rawContent: both.header};
+  }
+
+  // New line characters => to handle all operating systems.
+  const lines = both.header.split(/\r?\n/);
+
+  // Loop that add to metadata the current content of the fields of the header
+  // Like the format:
+  // id:
+  // title:
+  // original_id:
+  for (let i = 0; i < lines.length - 1; ++i) {
+    const keyvalue = lines[i].split(':');
+    const key = keyvalue[0].trim();
+    let value = keyvalue
+      .slice(1)
+      .join(':')
+      .trim();
+    try {
+      value = JSON.parse(value);
+    } catch (e) {}
+    metadata[key] = value;
+  }
+  return {metadata, rawContent: both.content};
+}
+
+module.exports = {
+  extractMetadata,
+};
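For reference, a small usage sketch of the new module; the sample document and variable names below are illustrative, not part of the commit:

const metadataUtils = require('./lib/server/metadataUtils.js');

// A hypothetical markdown source with a front-matter header.
const doc = [
  '---',
  'id: doc1',
  'title: "My Doc"',
  '---',
  '',
  '# Hello',
].join('\n');

const {metadata, rawContent} = metadataUtils.extractMetadata(doc);
// metadata   -> {id: 'doc1', title: 'My Doc'} (values are run through JSON.parse when they parse)
// rawContent -> everything below the closing '---', here '\n# Hello'

Documents with no '---' header fall through to {metadata: {}, rawContent: content}, the case the rewritten extractMetadata now handles explicitly.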
@@ -12,6 +12,8 @@ const fs = require('fs');
 const glob = require('glob');
 const chalk = require('chalk');
 
+const metadataUtils = require('./metadataUtils');
+
 const env = require('./env.js');
 const siteConfig = require(CWD + '/siteConfig.js');
 const versionFallback = require('./versionFallback.js');
@@ -79,52 +81,11 @@ function readSidebar() {
   return order;
 }
 
-// split markdown header
-function splitHeader(content) {
-  // New line characters need to handle all operating systems.
-  const lines = content.split(/\r?\n/);
-  if (lines[0] !== '---') {
-    return {};
-  }
-  let i = 1;
-  for (; i < lines.length - 1; ++i) {
-    if (lines[i] === '---') {
-      break;
-    }
-  }
-  return {
-    header: lines.slice(1, i + 1).join('\n'),
-    content: lines.slice(i + 1).join('\n'),
-  };
-}
-
-// Extract markdown metadata header
-function extractMetadata(content) {
-  const metadata = {};
-  const both = splitHeader(content);
-  if (Object.keys(both).length === 0) {
-    return {metadata, rawContent: content};
-  }
-  const lines = both.header.split('\n');
-  for (let i = 0; i < lines.length - 1; ++i) {
-    const keyvalue = lines[i].split(':');
-    const key = keyvalue[0].trim();
-    let value = keyvalue
-      .slice(1)
-      .join(':')
-      .trim();
-    try {
-      value = JSON.parse(value);
-    } catch (e) {}
-    metadata[key] = value;
-  }
-  return {metadata, rawContent: both.content};
-}
-
 // process the metadata for a document found in either 'docs' or 'translated_docs'
 function processMetadata(file, refDir) {
-  const result = extractMetadata(fs.readFileSync(file, 'utf8'));
+  const result = metadataUtils.extractMetadata(fs.readFileSync(file, 'utf8'));
   const language = utils.getLanguage(file, refDir) || 'en';
 
   const metadata = {};
   for (const fieldName of Object.keys(result.metadata)) {
     if (SupportedHeaderFields.has(fieldName)) {
@@ -367,7 +328,9 @@ function generateMetadataBlog() {
       .replace('-', '/')
       .replace('-', '/')
      .replace(/\.md$/, '.html');
-    const result = extractMetadata(fs.readFileSync(file, {encoding: 'utf8'}));
+    const result = metadataUtils.extractMetadata(
+      fs.readFileSync(file, {encoding: 'utf8'})
+    );
     const rawContent = result.rawContent;
     const metadata = Object.assign(
       {path: filePath, content: rawContent},
@@ -414,7 +377,6 @@ function generateMetadataBlog() {
 module.exports = {
   getDocsPath,
   readSidebar,
-  extractMetadata,
   processMetadata,
   generateMetadataDocs,
   generateMetadataBlog,
@@ -8,6 +8,8 @@
 function execute(port) {
   const extractTranslations = require('../write-translations');
 
+  const metadataUtils = require('./metadataUtils');
+
   const env = require('./env.js');
   const translation = require('./translation');
   const express = require('express');
@@ -186,8 +188,9 @@ function execute(port) {
       return;
     }
 
-    let rawContent = readMetadata.extractMetadata(fs.readFileSync(file, 'utf8'))
-      .rawContent;
+    let rawContent = metadataUtils.extractMetadata(
+      fs.readFileSync(file, 'utf8')
+    ).rawContent;
 
     // generate table of contents if appropriate
     if (rawContent && rawContent.indexOf(TABLE_OF_CONTENTS_TOKEN) !== -1) {
@@ -326,7 +329,7 @@ function execute(port) {
     file = file.replace(new RegExp('/', 'g'), '-');
     file = join(CWD, 'blog', file);
 
-    const result = readMetadata.extractMetadata(
+    const result = metadataUtils.extractMetadata(
       fs.readFileSync(file, {encoding: 'utf8'})
     );
     let rawContent = result.rawContent;
@@ -11,6 +11,8 @@ const fs = require('fs');
 const path = require('path');
 const assert = require('assert');
 
+const metadataUtils = require('./metadataUtils');
+
 const env = require('./env.js');
 const utils = require('./utils.js');
 const siteConfig = require(CWD + '/siteConfig.js');
@@ -37,50 +39,6 @@ if (fs.existsSync(CWD + '/languages.js')) {
   ];
 }
 
-/*****************************************************************/
-
-// included to prevent cyclical dependency with readMetadata.js
-
-function splitHeader(content) {
-  const lines = content.split(/\r?\n/);
-  let i = 1;
-  for (; i < lines.length - 1; ++i) {
-    if (lines[i] === '---') {
-      break;
-    }
-  }
-  return {
-    header: lines.slice(1, i + 1).join('\n'),
-    content: lines.slice(i + 1).join('\n'),
-  };
-}
-
-// Extract markdown metadata header
-function extractMetadata(content) {
-  const metadata = {};
-  const both = splitHeader(content);
-  // if no content returned, then that means there was no header, and both.header is the content
-  if (!both.content) {
-    return {metadata, rawContent: both.header};
-  }
-  const lines = both.header.split(/\r?\n/);
-  for (let i = 0; i < lines.length - 1; ++i) {
-    const keyvalue = lines[i].split(':');
-    const key = keyvalue[0].trim();
-    let value = keyvalue
-      .slice(1)
-      .join(':')
-      .trim();
-    try {
-      value = JSON.parse(value);
-    } catch (e) {}
-    metadata[key] = value;
-  }
-  return {metadata, rawContent: both.content};
-}
-
-/*****************************************************************/
-
 const versionFolder = CWD + '/versioned_docs/';
 
 // available stores doc ids of documents that are available for
@@ -95,7 +53,7 @@ files.forEach(file => {
   if (ext !== '.md' && ext !== '.markdown') {
     return;
   }
-  const res = extractMetadata(fs.readFileSync(file, 'utf8'));
+  const res = metadataUtils.extractMetadata(fs.readFileSync(file, 'utf8'));
   const metadata = res.metadata;
 
   if (!metadata.original_id) {
@@ -186,15 +144,20 @@ function diffLatestDoc(file, id) {
   }
 
   return (
-    extractMetadata(fs.readFileSync(latestFile, 'utf8')).rawContent.trim() !==
-    extractMetadata(fs.readFileSync(file, 'utf8')).rawContent.trim()
+    metadataUtils
+      .extractMetadata(fs.readFileSync(latestFile, 'utf8'))
+      .rawContent.trim() !==
+    metadataUtils
+      .extractMetadata(fs.readFileSync(file, 'utf8'))
+      .rawContent.trim()
   );
 }
 
 // return metadata for a versioned file given the file, its version (requested),
 // the version of the file to be used, and its language
 function processVersionMetadata(file, version, useVersion, language) {
-  const metadata = extractMetadata(fs.readFileSync(file, 'utf8')).metadata;
+  const metadata = metadataUtils.extractMetadata(fs.readFileSync(file, 'utf8'))
+    .metadata;
 
   // Add subdirectory information to versioned_doc metadata
   // Example: `versioned_docs/version-1.1.6/projectA/readme.md` file with id `version-1.1.6-readme`
@@ -15,6 +15,7 @@ const chalk = require('chalk');
 const readMetadata = require('./server/readMetadata.js');
 const utils = require('./server/utils.js');
 const versionFallback = require('./server/versionFallback.js');
+const metadataUtils = require('./server/metadataUtils.js');
 const env = require('./server/env.js');
 
 const CWD = process.cwd();
@@ -85,7 +86,7 @@ files.forEach(file => {
     return;
   }
 
-  const res = readMetadata.extractMetadata(fs.readFileSync(file, 'utf8'));
+  const res = metadataUtils.extractMetadata(fs.readFileSync(file, 'utf8'));
   let metadata = res.metadata;
   // Don't version any docs without any metadata whatsoever.
   if (Object.keys(metadata).length === 0) {