Mirror of https://github.com/facebook/docusaurus.git, synced 2025-05-22 21:47:01 +02:00
feat(v2): upgrade addRoutes API to introduce query (#1345)
* feat(v2): upgrade addRoutes API to enable query
* nits
Parent: 7e6b74b820
Commit: 6e9b85fe43
9 changed files with 121 additions and 90 deletions
@@ -2,10 +2,12 @@
   "name": "@docusaurus/plugin-content-blog",
   "version": "1.0.0",
   "description": "Blog plugin for Docusaurus",
-  "main": "index.js",
+  "main": "src/index.js",
   "license": "MIT",
   "dependencies": {
     "@docusaurus/utils": "^1.0.0",
+    "front-matter": "^3.0.1",
+    "loader-utils": "^1.2.3",
     "fs-extra": "^7.0.1",
     "globby": "^9.1.0"
   },
@@ -8,7 +8,7 @@
 const globby = require('globby');
 const path = require('path');
 const fs = require('fs-extra');
-const {parse, idx, normalizeUrl, generate} = require('@docusaurus/utils');
+const {parse, idx, normalizeUrl} = require('@docusaurus/utils');
 
 // TODO: Use a better slugify function that doesn't rely on a specific file extension.
 function fileToUrl(fileName) {
@@ -30,8 +30,6 @@ const DEFAULT_OPTIONS = {
   blogPostComponent: '@theme/BlogPost',
 };
 
-const TRUNCATE_MARKER = /<!--\s*truncate\s*-->/;
-
 class DocusaurusPluginContentBlog {
   constructor(opts, context) {
     this.options = {...DEFAULT_OPTIONS, ...opts};
@@ -54,7 +52,7 @@ class DocusaurusPluginContentBlog {
   // Fetches blog contents and returns metadata for the contents.
   async loadContent() {
     const {pageCount, include, routeBasePath} = this.options;
-    const {env, generatedFilesDir, siteConfig} = this.context;
+    const {env, siteConfig} = this.context;
     const blogDir = this.contentPath;
 
     const {baseUrl} = siteConfig;
@@ -82,19 +80,7 @@ class DocusaurusPluginContentBlog {
         );
 
         const fileString = await fs.readFile(source, 'utf-8');
-        const {metadata: rawMetadata, content} = parse(fileString);
-
-        let truncatedSource;
-        const isTruncated = TRUNCATE_MARKER.test(content);
-        if (isTruncated) {
-          const pluginContentDir = path.join(generatedFilesDir, this.getName());
-          await generate(
-            pluginContentDir,
-            blogFileName,
-            content.split(TRUNCATE_MARKER)[0],
-          );
-          truncatedSource = path.join(pluginContentDir, blogFileName);
-        }
+        const {metadata: rawMetadata} = parse(fileString);
 
         const metadata = {
           permalink: normalizeUrl([
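After this hunk, loadContent no longer writes a truncated copy of each post into generatedFilesDir; a post's metadata keeps only its original source path, and truncation is deferred to import time via the query introduced below. A rough sketch of one resulting metadata entry (all values here are invented for illustration):

    // Illustrative shape only — every value is made up; real fields come from the
    // post's front matter plus the computed permalink, date and language.
    const postMetadata = {
      permalink: '/blog/2019/01/01/hello-world',
      source: '/website/blog/2019-01-01-hello-world.md',
      title: 'Hello World', // spread in from rawMetadata (front matter)
      date: new Date('2019-01-01'),
      language: 'en',
    };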
@@ -103,7 +89,6 @@ class DocusaurusPluginContentBlog {
             fileToUrl(blogFileName),
           ]),
           source,
-          truncatedSource,
           ...rawMetadata,
           date,
           language: defaultLangTag,
@@ -144,9 +129,12 @@ class DocusaurusPluginContentBlog {
         path: permalink,
         component: blogPageComponent,
         metadata: metadataItem,
-        modules: metadataItem.posts.map(
-          post => post.truncatedSource || post.source,
-        ),
+        modules: metadataItem.posts.map(post => ({
+          path: post.source,
+          query: {
+            truncated: true,
+          },
+        })),
       });
       return;
     }
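With this change an entry in modules is no longer a plain source path but an object carrying a path plus a query, which loadRoutes (later in this diff) serializes into the import specifier. A sketch of the route config the blog plugin now registers for a listing page (paths and permalink are invented; the helper that receives this object is not visible in the hunk):

    // Illustrative route config shape only.
    const routeConfig = {
      path: '/blog',
      component: '@theme/BlogPage',
      metadata: {posts: [/* per-post metadata items built above */]},
      modules: [
        {path: '/website/blog/2019-01-01-hello.md', query: {truncated: true}},
        {path: '/website/blog/2019-01-02-world.md', query: {truncated: true}},
      ],
    };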
@@ -160,8 +148,26 @@ class DocusaurusPluginContentBlog {
       });
     }
 
-  // TODO: Add configureWebpack plugin to read Markdown. Currently it's using
-  // the docs plugin's markdown loader.
+  configureWebpack(config, isServer, {getBabelLoader, getCacheLoader}) {
+    return {
+      module: {
+        rules: [
+          {
+            test: /(\.mdx?)$/,
+            include: [this.contentPath],
+            use: [
+              getCacheLoader(isServer),
+              getBabelLoader(isServer),
+              '@docusaurus/mdx-loader',
+              {
+                loader: path.resolve(__dirname, './markdownLoader.js'),
+              },
+            ],
+          },
+        ],
+      },
+    };
+  }
 }
 
 module.exports = DocusaurusPluginContentBlog;
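The blog plugin now registers its own Markdown pipeline through the configureWebpack lifecycle instead of piggybacking on the docs plugin. The partial config it returns is merged into the base config by the applyConfigureWebpack helper near the end of this diff; a minimal sketch of that merge behaviour, assuming webpack-merge v4 semantics (arrays such as module.rules are concatenated by default):

    // Minimal sketch of how a returned partial config combines with the base config.
    const merge = require('webpack-merge');

    const baseConfig = {
      module: {rules: [{test: /\.css$/, use: ['css-loader']}]},
    };
    const fromBlogPlugin = {
      module: {rules: [{test: /(\.mdx?)$/, use: ['@docusaurus/mdx-loader']}]},
    };

    console.log(merge(baseConfig, fromBlogPlugin).module.rules.length); // 2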
@@ -0,0 +1,34 @@
+/**
+ * Copyright (c) 2017-present, Facebook, Inc.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+const fm = require('front-matter');
+const {parseQuery} = require('loader-utils');
+
+const TRUNCATE_MARKER = /<!--\s*truncate\s*-->/;
+
+module.exports = async function(fileString) {
+  const callback = this.async();
+
+  // Extract content of markdown (without frontmatter).
+  let {body: content} = fm(fileString);
+
+  // Truncate content if requested (e.g: file.md?truncated=true)
+  const {truncated} = this.resourceQuery && parseQuery(this.resourceQuery);
+  if (truncated) {
+    if (TRUNCATE_MARKER.test(content)) {
+      // eslint-disable-next-line
+      content = content.split(TRUNCATE_MARKER)[0];
+    } else {
+      // Return first 4 lines of the content as summary
+      content = content
+        .split('\n')
+        .slice(0, 4)
+        .join('\n');
+    }
+  }
+  return callback(null, content);
+};
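The new loader reads the ?truncated=true part of the import specifier through webpack's loader context: this.resourceQuery holds the raw query string (an empty string when there is none, which is why the && guard is enough), and loader-utils' parseQuery turns it into an object with "true"/"false" converted to booleans. A small check of that behaviour, assuming loader-utils v1 (the version range added to the plugin's package.json above):

    // parseQuery requires the string to start with "?" and converts "true" to a boolean.
    const {parseQuery} = require('loader-utils');

    console.log(parseQuery('?truncated=true')); // { truncated: true }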
@@ -220,7 +220,7 @@ class DocusaurusPluginContentDocs {
     });
   }
 
-  configureWebpack(config, isServer) {
+  configureWebpack(config, isServer, {getBabelLoader, getCacheLoader}) {
     const versionedDir = path.join(this.context.siteDir, 'versioned_docs');
     const translatedDir = path.join(this.context.siteDir, 'translated_docs');
 
@@ -228,30 +228,12 @@
       module: {
         rules: [
           {
-            test: /(\.mdx?)$/, // TODO: Read only this plugin's markdown files.
+            test: /(\.mdx?)$/,
+            include: [this.contentPath],
             use: [
-              // TODO: Add back cache loader and read babel loader from existing config
-              // instead of duplicating it.
-              {
-                loader: 'babel-loader',
-                options: {
-                  // ignore local project babel config (.babelrc)
-                  babelrc: false,
-                  // ignore local project babel config (babel.config.js)
-                  configFile: false,
-                  presets: ['@babel/env', '@babel/react'],
-                  plugins: [
-                    'react-hot-loader/babel', // To enable react-hot-loader
-                    isServer
-                      ? 'dynamic-import-node'
-                      : '@babel/syntax-dynamic-import',
-                    'react-loadable/babel',
-                  ],
-                },
-              },
-              {
-                loader: '@docusaurus/mdx-loader',
-              },
+              getCacheLoader(isServer),
+              getBabelLoader(isServer),
+              '@docusaurus/mdx-loader',
               {
                 loader: path.resolve(__dirname, './markdown/index.js'),
                 options: {
@@ -6,6 +6,7 @@
  */
 
 const {generateChunkName} = require('@docusaurus/utils');
+const {stringify} = require('querystring');
 
 async function loadRoutes(pluginsRouteConfigs) {
   const imports = [
@@ -28,6 +29,14 @@ async function loadRoutes(pluginsRouteConfigs) {
     component: NotFound,
   }`;
 
+  function genImportStr(target, prefix, name) {
+    const isObj = typeof target === 'object';
+    const importStr = isObj ? target.path : target;
+    const queryStr = target.query ? `?${stringify(target.query)}` : '';
+    const chunkName = generateChunkName(name || importStr, prefix);
+    return `() => import(/* webpackChunkName: '${chunkName}' */ '${importStr}${queryStr}')`;
+  }
+
   function generateRouteCode(pluginRouteConfig) {
     const {path, component, metadata, modules, routes} = pluginRouteConfig;
     if (routes) {
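genImportStr is the piece that turns both old-style string targets and the new {path, query} objects into dynamic-import expressions, appending the serialized query (via Node's querystring.stringify) to the module specifier. A simplified stand-in, with chunk naming deliberately left out since generateChunkName lives in @docusaurus/utils:

    // Simplified stand-in; only the query handling is shown.
    const {stringify} = require('querystring');

    function toImportStr(target) {
      const isObj = typeof target === 'object';
      const importStr = isObj ? target.path : target;
      const queryStr = isObj && target.query ? `?${stringify(target.query)}` : '';
      return `() => import('${importStr}${queryStr}')`;
    }

    console.log(toImportStr('@theme/BlogPage'));
    // () => import('@theme/BlogPage')
    console.log(toImportStr({path: '/website/blog/2019-01-01-hello.md', query: {truncated: true}}));
    // () => import('/website/blog/2019-01-01-hello.md?truncated=true')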
@@ -35,10 +44,7 @@ async function loadRoutes(pluginsRouteConfigs) {
       {
         path: '${path}',
         component: Loadable({
-          loader: () => import(/* webpackChunkName: '${generateChunkName(
-            component,
-            'component',
-          )}' */'${component}'),
+          loader: ${genImportStr(component, 'component')},
           loading: Loading,
         }),
         routes: [${routes.map(generateRouteCode).join(',')}],
@@ -46,34 +52,26 @@ async function loadRoutes(pluginsRouteConfigs) {
     }
 
     addRoutesPath(path);
+    const genModulesImportStr = `${modules
+      .map((mod, i) => `Mod${i}: ${genImportStr(mod, i, path)},`)
+      .join('\n')}`;
+    const genModulesLoadedStr = `[${modules
+      .map((mod, i) => `loaded.Mod${i}.default,`)
+      .join('\n')}]`;
+
     return `
 {
   path: '${path}',
   exact: true,
   component: Loadable.Map({
     loader: {
-      ${modules
-        .map(
-          (module, index) =>
-            `  Module${index}: () => import(/* webpackChunkName: '${generateChunkName(
-              path,
-              `module${index}`,
-            )}' */'${module}'),`,
-        )
-        .join('\n')}
-      Component: () => import(/* webpackChunkName: '${generateChunkName(
-        component,
-        'component',
-      )}' */'${component}'),
+      ${genModulesImportStr}
+      Component: ${genImportStr(component, 'component')},
     },
     loading: Loading,
     render(loaded, props) {
       const Component = loaded.Component.default;
-      const modules = [
-        ${modules
-          .map((module, index) => `  loaded.Module${index}.default,`)
-          .join('\n')}
-      ];
+      const modules = ${genModulesLoadedStr};
       return (
         <Component {...props} metadata={${JSON.stringify(
           metadata,
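Put together, a blog listing route with two truncated posts now yields generated code roughly of the following shape (chunk names, the serialized metadata, and the tail of render() are elided; the paths are invented):

    // Roughly what loadRoutes emits for one such route — illustrative only.
    {
      path: '/blog',
      exact: true,
      component: Loadable.Map({
        loader: {
          Mod0: () => import(/* webpackChunkName: '...' */ '/website/blog/2019-01-01-hello.md?truncated=true'),
          Mod1: () => import(/* webpackChunkName: '...' */ '/website/blog/2019-01-02-world.md?truncated=true'),
          Component: () => import(/* webpackChunkName: '...' */ '@theme/BlogPage'),
        },
        loading: Loading,
        render(loaded, props) {
          const Component = loaded.Component.default;
          const modules = [loaded.Mod0.default, loaded.Mod1.default];
          // ...renders <Component {...props} metadata={...}> as in the template above
        },
      }),
    }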
@@ -30,10 +30,7 @@ function BlogPage(props) {
       </Head>
       <div>
         {BlogPosts.map((PostContent, index) => (
-          <Post
-            key={index}
-            truncated={posts[index].truncatedSource}
-            metadata={posts[index]}>
+          <Post key={index} truncated metadata={posts[index]}>
             <PostContent />
           </Post>
         ))}
@@ -37,8 +37,8 @@ module.exports = function createBaseConfig(props, isServer) {
    resolve: {
      symlinks: true,
      alias: {
        // https://github.com/gaearon/react-hot-loader#react--dom
        'react-dom': isProd ? 'react-dom' : '@hot-loader/react-dom',
        ejs: 'ejs/ejs.min.js',
        '@theme': themePath,
        '@site': siteDir,
        '@build': outDir,
@@ -9,20 +9,6 @@ const MiniCssExtractPlugin = require('mini-css-extract-plugin');
 const cacheLoaderVersion = require('cache-loader/package.json').version;
 const merge = require('webpack-merge');
 
-// Modify the generated webpack config with normal webpack config.
-function applyConfigureWebpack(userConfig, config, isServer) {
-  if (typeof userConfig === 'object') {
-    return merge(config, userConfig);
-  }
-  if (typeof userConfig === 'function') {
-    const res = userConfig(config, isServer);
-    if (res && typeof res === 'object') {
-      return merge(config, res);
-    }
-  }
-  return config;
-}
-
 // Utility method to get style loaders
 function getStyleLoaders(isServer, cssOptions) {
   const isProd = process.env.NODE_ENV === 'production';
@@ -73,6 +59,33 @@ function getBabelLoader(isServer, babelOptions) {
   };
 }
 
+/**
+ * Helper function to modify webpack config
+ * @param {Object | Function} configureWebpack a webpack config or a function to modify config
+ * @param {Object} config initial webpack config
+ * @param {Boolean} isServer indicates if this is a server webpack configuration
+ * @returns {Object} final/ modified webpack config
+ */
+function applyConfigureWebpack(configureWebpack, config, isServer) {
+  if (typeof configureWebpack === 'object') {
+    return merge(config, configureWebpack);
+  }
+
+  // Export some utility functions
+  const utils = {
+    getStyleLoaders,
+    getCacheLoader,
+    getBabelLoader,
+  };
+  if (typeof configureWebpack === 'function') {
+    const res = configureWebpack(config, isServer, utils);
+    if (res && typeof res === 'object') {
+      return merge(config, res);
+    }
+  }
+  return config;
+}
+
 module.exports = {
   getBabelLoader,
   getCacheLoader,
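From a plugin author's perspective, the new third argument means a function-style configureWebpack can lean on the shared loader helpers instead of duplicating Babel configuration, as the docs and blog plugins above now do. A hypothetical plugin sketch (class name and content path are made up):

    // Hypothetical plugin using the utils object that applyConfigureWebpack passes through.
    const path = require('path');

    class MyContentPlugin {
      constructor() {
        this.contentPath = path.resolve(__dirname, 'content'); // made-up content location
      }

      configureWebpack(config, isServer, {getBabelLoader, getCacheLoader}) {
        return {
          module: {
            rules: [
              {
                test: /(\.mdx?)$/,
                include: [this.contentPath],
                use: [getCacheLoader(isServer), getBabelLoader(isServer), '@docusaurus/mdx-loader'],
              },
            ],
          },
        };
      }
    }

    module.exports = MyContentPlugin;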
@@ -51,7 +51,6 @@
    "globby": "^9.1.0",
    "html-webpack-plugin": "^3.2.0",
    "is-wsl": "^1.1.0",
    "loader-utils": "^1.1.0",
    "lodash": "^4.17.11",
    "mini-css-extract-plugin": "^0.4.1",
    "portfinder": "^1.0.13",