docusaurus/lib/server/generate.js
Cheng Lou 76e423fba3 Use path module instead of string concat / (#255)
This also partly prepares for #253. I discovered a bug when setting `projectName` to `""`: a `"build/" + projectName + "/foo"` concatenation produced `"build//foo"`. Granted, it's a hack, but we should use `path` anyway.

Test: tested on https://github.com/BuckleScript/bucklescript.github.io. Seems to be working.
2017-12-04 09:32:02 -08:00


/**
 * Copyright (c) 2017-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
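// Static-site generation entry point: reads the site's metadata, renders docs,
// blog posts, blog index pages, feeds, a sitemap, and user pages to static HTML
// under the build directory, and copies static assets from both Docusaurus and
// the user's site.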
function execute() {
  const extractTranslations = require("../write-translations.js");
  const CWD = process.cwd();
  const fs = require("fs-extra");
  const readMetadata = require("./readMetadata.js");
  const renderToStaticMarkup = require("react-dom/server").renderToStaticMarkup;
  const path = require("path");
  const toSlug = require("../core/toSlug.js");
  const React = require("react");
  const mkdirp = require("mkdirp");
  const glob = require("glob");
  const chalk = require("chalk");
  const Site = require("../core/Site.js");
  const siteConfig = require(CWD + "/siteConfig.js");
  const translate = require("./translate.js");
  const versionFallback = require("./versionFallback.js");
  const feed = require("./feed.js");
  const sitemap = require("./sitemap.js");
  const join = path.join;
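  // Translations and versioning are opt-in features, toggled by the presence
  // of languages.js and versions.json in the site directory.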
  const ENABLE_TRANSLATION = fs.existsSync(join(CWD, "languages.js"));
  const ENABLE_VERSIONING = fs.existsSync(join(CWD, "versions.json"));
  let languages;
  if (ENABLE_TRANSLATION) {
    languages = require(CWD + "/languages.js");
  } else {
    languages = [
      {
        enabled: true,
        name: "English",
        tag: "en"
      }
    ];
  }
  // create the folder path for a file if it does not exist, then write the file
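  // (The regex below strips the final path segment, i.e. a forward-slash-only
  // equivalent of path.dirname.)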
  function writeFileAndCreateFolder(file, content) {
    mkdirp.sync(file.replace(new RegExp("/[^/]*$"), ""));
    fs.writeFileSync(file, content);
  }
  const TABLE_OF_CONTENTS_TOKEN = "<AUTOGENERATED_TABLE_OF_CONTENTS>";
  // takes the content of a doc article and returns the content with a table of
  // contents inserted
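  // Only h3 headings whose text starts with inline code are picked up, e.g. a
  // line written as "### `someFunction(arg)`" (hypothetical name) becomes a
  // list item linking to that heading's slug.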
  const insertTableOfContents = rawContent => {
    const regexp = /\n###\s+(`.*`.*)\n/g;
    let match;
    const headers = [];
    while ((match = regexp.exec(rawContent))) {
      headers.push(match[1]);
    }
    const tableOfContents = headers
      .map(header => ` - [${header}](#${toSlug(header)})`)
      .join("\n");
    return rawContent.replace(TABLE_OF_CONTENTS_TOKEN, tableOfContents);
  };
  // returns true if a file should be excluded from concatenation into the
  // default Docusaurus styles
  function isSeparateCss(file) {
    if (!siteConfig.separateCss) {
      return false;
    }
    for (let i = 0; i < siteConfig.separateCss.length; i++) {
      if (file.includes(siteConfig.separateCss[i])) {
        return true;
      }
    }
    return false;
  }
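  // For example, siteConfig.separateCss = ["static/prism"] (hypothetical path
  // fragment) would keep any CSS file whose path contains "static/prism" out of
  // the concatenated main.css and copy it as a standalone file instead.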
  console.log("generate.js triggered...");
  // array of tags of enabled languages
  const enabledLanguages = [];
  languages.filter(lang => lang.enabled).forEach(lang => {
    enabledLanguages.push(lang.tag);
  });
  readMetadata.generateMetadataDocs();
  const Metadata = require("../core/metadata.js");
  // TODO: what if the project is a github org page? We should not use
  // siteConfig.projectName in this case. Otherwise a GitHub org doc URL would
  // look weird: https://myorg.github.io/myorg/docs
  // TODO: siteConfig.projectName is a misnomer. The actual project name is
  // `title`. `projectName` is only used to generate a folder, which isn't
  // needed when the project's a GitHub org page
  const buildDir = join(CWD, "build", siteConfig.projectName);
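  // e.g. with projectName set to "my-project" (hypothetical) this resolves to
  // <site dir>/build/my-project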
  // mdToHtml is a map from a markdown file name to its html link, used to
  // change relative markdown links that work on GitHub into actual site links
  const mdToHtml = {};
  Object.keys(Metadata).forEach(id => {
    const metadata = Metadata[id];
    if (metadata.language !== "en" || metadata.original_id) {
      return;
    }
    let htmlLink =
      siteConfig.baseUrl + metadata.permalink.replace("/next/", "/");
    if (htmlLink.includes("/docs/en/")) {
      htmlLink = htmlLink.replace("/docs/en/", "/docs/en/VERSION/");
    } else {
      htmlLink = htmlLink.replace("/docs/", "/docs/VERSION/");
    }
    mdToHtml[metadata.source] = htmlLink;
  });
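  // The VERSION placeholder is filled in (or dropped) per document below; e.g.
  // a source file "doc1.md" (hypothetical name) maps to something like
  // "/docs/en/VERSION/doc1.html" at this point.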
  const DocsLayout = require("../core/DocsLayout.js");
  const Redirect = require("../core/Redirect.js");
  fs.removeSync(join(CWD, "build"));
  // create html files for all docs by going through all doc ids
  Object.keys(Metadata).forEach(id => {
    const metadata = Metadata[id];
    // determine what file to use according to its id
    let file;
    if (metadata.original_id) {
      if (ENABLE_TRANSLATION && metadata.language !== "en") {
        file = join(CWD, "translated_docs", metadata.language, metadata.source);
      } else {
        file = join(CWD, "versioned_docs", metadata.source);
      }
    } else {
      if (metadata.language === "en") {
        file = join(CWD, "..", readMetadata.getDocsPath(), metadata.source);
      } else {
        file = join(CWD, "translated_docs", metadata.language, metadata.source);
      }
    }
    if (!fs.existsSync(file)) {
      return;
    }
    let rawContent = readMetadata.extractMetadata(fs.readFileSync(file, "utf8"))
      .rawContent;
    const language = metadata.language;
    // generate table of contents if appropriate
    if (rawContent && rawContent.indexOf(TABLE_OF_CONTENTS_TOKEN) !== -1) {
      rawContent = insertTableOfContents(rawContent);
    }
    let latestVersion;
    if (ENABLE_VERSIONING) {
      latestVersion = JSON.parse(
        fs.readFileSync(join(CWD, "versions.json"), "utf8")
      )[0];
    }
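    // versions.json is assumed to be a JSON array ordered newest-first; its
    // first entry is treated as the latest version.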
    // replace any links to markdown files to their website html links
    Object.keys(mdToHtml).forEach(function(key, index) {
      let link = mdToHtml[key];
      link = link.replace("/en/", "/" + language + "/");
      link = link.replace(
        "/VERSION/",
        metadata.version && metadata.version !== latestVersion
          ? "/" + metadata.version + "/"
          : "/"
      );
      // replace relative links without "./"
      rawContent = rawContent.replace(
        new RegExp("\\]\\(" + key, "g"),
        "](" + link
      );
      // replace relative links with "./"
      rawContent = rawContent.replace(
        new RegExp("\\]\\(\\./" + key, "g"),
        "](" + link
      );
    });
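    // e.g. a GitHub-style link "](doc2.md" (hypothetical file) is rewritten to
    // something like "](/docs/fr/1.0.0/doc2.html" for a translated, versioned
    // page, with the trailing ")" of the original link left untouched.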
    // replace any relative links to static assets to absolute links
    rawContent = rawContent.replace(
      /\]\(assets\//g,
      "](" + siteConfig.baseUrl + "docs/assets/"
    );
    const docComp = (
      <DocsLayout metadata={metadata} language={language} config={siteConfig}>
        {rawContent}
      </DocsLayout>
    );
    const str = renderToStaticMarkup(docComp);
    const targetFile = join(buildDir, metadata.permalink);
    writeFileAndCreateFolder(targetFile, str);
    // generate english page redirects when languages are enabled
    if (ENABLE_TRANSLATION && metadata.permalink.indexOf("docs/en") !== -1) {
      const redirectComp = (
        <Redirect
          metadata={metadata}
          language={language}
          config={siteConfig}
          redirect={siteConfig.baseUrl + metadata.permalink}
        />
      );
      const redirectStr = renderToStaticMarkup(redirectComp);
      // create a redirects page for doc files
      const redirectFile = join(
        buildDir,
        metadata.permalink.replace("docs/en", "docs")
      );
      writeFileAndCreateFolder(redirectFile, redirectStr);
    }
  });
  // copy docs assets if they exist
  if (fs.existsSync(join(CWD, "..", readMetadata.getDocsPath(), "assets"))) {
    fs.copySync(
      join(CWD, "..", readMetadata.getDocsPath(), "assets"),
      join(buildDir, "docs", "assets")
    );
  }
  // create html files for all blog posts (each article)
  if (fs.existsSync(join(__dirname, "..", "core", "MetadataBlog.js"))) {
    fs.removeSync(join(__dirname, "..", "core", "MetadataBlog.js"));
  }
  readMetadata.generateMetadataBlog();
  const MetadataBlog = require("../core/MetadataBlog.js");
  const BlogPostLayout = require("../core/BlogPostLayout.js");
  let files = glob.sync(join(CWD, "blog", "**", "*.*"));
  files
    .sort()
    .reverse()
    .forEach(file => {
      const extension = path.extname(file);
      if (extension !== ".md" && extension !== ".markdown") {
        return;
      }
      // convert filename to use slashes
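      // e.g. "2017-12-04-my-post.md" (hypothetical file name) becomes
      // "2017/12/04-my-post.html"; only the first three hyphens are replaced,
      // so hyphens in the post title itself are preserved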
      const filePath = path
        .basename(file)
        .replace("-", "/")
        .replace("-", "/")
        .replace("-", "/")
        .replace(/\.(md|markdown)$/, ".html");
      const result = readMetadata.extractMetadata(
        fs.readFileSync(file, { encoding: "utf8" })
      );
      const rawContent = result.rawContent;
      const metadata = Object.assign(
        { path: filePath, content: rawContent },
        result.metadata
      );
      metadata.id = metadata.title;
      let language = "en";
      const blogPostComp = (
        <BlogPostLayout
          metadata={metadata}
          language={language}
          config={siteConfig}
        >
          {rawContent}
        </BlogPostLayout>
      );
      const str = renderToStaticMarkup(blogPostComp);
      let targetFile = join(buildDir, "blog", filePath);
      writeFileAndCreateFolder(targetFile, str);
    });
  // create html files for all blog pages (collections of article previews)
  const BlogPageLayout = require("../core/BlogPageLayout.js");
  const perPage = 10;
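  // Page 0 is written to blog/index.html, subsequent pages to
  // blog/page2/index.html, blog/page3/index.html, and so on.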
  for (let page = 0; page < Math.ceil(MetadataBlog.length / perPage); page++) {
    let language = "en";
    const metadata = { page: page, perPage: perPage };
    const blogPageComp = (
      <BlogPageLayout
        metadata={metadata}
        language={language}
        config={siteConfig}
      />
    );
    const str = renderToStaticMarkup(blogPageComp);
    let targetFile = join(
      buildDir,
      "blog",
      page > 0 ? "page" + (page + 1) : "",
      "index.html"
    );
    writeFileAndCreateFolder(targetFile, str);
  }
  // create rss files for all blog pages, if there are any blog files
  if (MetadataBlog.length > 0) {
    let targetFile = join(buildDir, "blog", "feed.xml");
    writeFileAndCreateFolder(targetFile, feed());
    targetFile = join(buildDir, "blog", "atom.xml");
    writeFileAndCreateFolder(targetFile, feed("atom"));
  }
  // create sitemap
  if (MetadataBlog.length > 0 && Object.keys(Metadata).length > 0) {
    let targetFile = join(buildDir, "sitemap.xml");
    sitemap(xml => {
      writeFileAndCreateFolder(targetFile, xml);
    });
  }
  // copy blog assets if they exist
  if (fs.existsSync(join(CWD, "blog", "assets"))) {
    fs.copySync(join(CWD, "blog", "assets"), join(buildDir, "blog", "assets"));
  }
  // copy all static files from docusaurus
  files = glob.sync(join(__dirname, "..", "static", "**"));
  files.forEach(file => {
    let targetFile = join(
      buildDir,
      // TODO: use x-platform path functions
      file.split("/static/")[1] || ""
    );
    // parse css files to replace colors according to siteConfig
    if (file.match(/\.css$/)) {
      let cssContent = fs.readFileSync(file, "utf8");
      if (
        !siteConfig.colors ||
        !siteConfig.colors.primaryColor ||
        !siteConfig.colors.secondaryColor
      ) {
        console.error(
          `${chalk.yellow(
            "Missing color configuration."
          )} Make sure siteConfig.colors includes primaryColor and secondaryColor fields.`
        );
      }
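      // Color variables in the shipped CSS are written as $<key>; e.g. a rule
      // like "color: $primaryColor;" is rewritten with the value from
      // siteConfig.colors.primaryColor.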
      Object.keys(siteConfig.colors).forEach(key => {
        const color = siteConfig.colors[key];
        cssContent = cssContent.replace(new RegExp("\\$" + key, "g"), color);
      });
      mkdirp.sync(targetFile.replace(new RegExp("/[^/]*$"), ""));
      fs.writeFileSync(targetFile, cssContent);
    } else if (!fs.lstatSync(file).isDirectory()) {
      mkdirp.sync(targetFile.replace(new RegExp("/[^/]*$"), ""));
      fs.copySync(file, targetFile);
    }
  });
  // copy all static files from user
  files = glob.sync(join(CWD, "static", "**"));
  files.forEach(file => {
    // parse css files to replace colors according to siteConfig
    if (file.match(/\.css$/) && !isSeparateCss(file)) {
      const mainCss = join(buildDir, "css", "main.css");
      let cssContent = fs.readFileSync(file, "utf8");
      cssContent = fs.readFileSync(mainCss, "utf8") + "\n" + cssContent;
      Object.keys(siteConfig.colors).forEach(key => {
        const color = siteConfig.colors[key];
        cssContent = cssContent.replace(new RegExp("\\$" + key, "g"), color);
      });
      fs.writeFileSync(mainCss, cssContent);
    } else if (!fs.lstatSync(file).isDirectory()) {
      let parts = file.split("/static/");
      let targetFile = join(buildDir, parts[1]);
      mkdirp.sync(targetFile.replace(new RegExp("/[^/]*$"), ""));
      fs.copySync(file, targetFile);
    }
  });
  // compile/copy pages from user
  let pagesArr = [];
  files = glob.sync(join(CWD, "pages", "**"));
  files.forEach(file => {
    // render .js files to strings
    if (file.match(/\.js$/)) {
      // make temp file for sake of require paths
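      // (presumably so that relative require() calls inside the page component,
      // e.g. to Docusaurus core components, resolve against this package rather
      // than the user's site directory)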
      const parts = file.split("pages");
      let tempFile = join(__dirname, "..", "pages", parts[1]);
      tempFile = tempFile.replace(
        path.basename(file),
        "temp" + path.basename(file)
      );
      mkdirp.sync(tempFile.replace(new RegExp("/[^/]*$"), ""));
      fs.copySync(file, tempFile);
      const ReactComp = require(tempFile);
      let targetFile = join(buildDir, parts[1]);
      targetFile = targetFile.replace(/\.js$/, ".html");
      const regexLang = /\/pages\/(.*)\//;
      const match = regexLang.exec(file);
      const langParts = match[1].split("/");
      if (langParts.indexOf("en") !== -1) {
        // copy and compile a page for each enabled language from the English file
        for (let i = 0; i < enabledLanguages.length; i++) {
          let language = enabledLanguages[i];
          // skip conversion from english file if a file exists for this language
          if (
            language !== "en" &&
            // TODO: use path functions
            fs.existsSync(file.replace("/en/", "/" + language + "/"))
          ) {
            continue;
          }
          translate.setLanguage(language);
          const str = renderToStaticMarkup(
            <Site language={language} config={siteConfig}>
              <ReactComp language={language} />
            </Site>
          );
          writeFileAndCreateFolder(
            // TODO: use path functions
            targetFile.replace("/en/", "/" + language + "/"),
            str
          );
        }
      } else {
        // allow for rendering of other files not in pages/en folder
        let language = "en";
        for (let i = 0; i < langParts.length; i++) {
          if (enabledLanguages.indexOf(langParts[i]) !== -1) {
            language = langParts[i];
          }
        }
        translate.setLanguage(language);
        const str = renderToStaticMarkup(
          <Site language={language} config={siteConfig}>
            <ReactComp language={language} />
          </Site>
        );
        writeFileAndCreateFolder(targetFile, str);
      }
      fs.removeSync(tempFile);
    } else if (!fs.lstatSync(file).isDirectory()) {
      // copy other non .js files
      let parts = file.split("pages");
      let targetFile = join(buildDir, parts[1]);
      mkdirp.sync(targetFile.replace(new RegExp("/[^/]*$"), ""));
      fs.copySync(file, targetFile);
    }
  });
  // copy html files in 'en' to base level as well
  files = glob.sync(join(buildDir, "en", "**"));
  files.forEach(file => {
    let targetFile = file.replace(join(buildDir, "en"), join(buildDir));
    if (file.match(/\.html$/)) {
      fs.copySync(file, targetFile);
    }
  });
  // Generate CNAME file if a custom domain is specified in siteConfig
  if (siteConfig.cname) {
    let targetFile = join(buildDir, "CNAME");
    fs.writeFileSync(targetFile, siteConfig.cname);
  }
}
module.exports = execute;