refactor(v2): blog data revamp (#1450)

* refactor(v2): blog data revamp

* fix(v2): fix incorrect blog total count

* misc: remove console.log

* feat(v2): export frontMatter as an object within MDX file (#1451)

* refactor. Don't confuse metadata & frontmatter

* export frontMatter in content itself

* nits

* nits name

* don't truncate blog posts to the first four lines
Authored by Yangshun Tay on 2019-05-13 00:16:15 -07:00; committed by GitHub
parent 070723697f
commit 23b50f17a1
18 changed files with 258 additions and 119 deletions


@@ -14,6 +14,7 @@
"@mdx-js/mdx": "^1.0.18",
"@mdx-js/react": "^1.0.16",
"github-slugger": "^1.2.1",
"gray-matter": "^4.0.2",
"loader-utils": "^1.2.3",
"mdast-util-to-string": "^1.0.5",
"prism-themes": "^1.1.0",


@@ -10,6 +10,8 @@ const mdx = require('@mdx-js/mdx');
const rehypePrism = require('@mapbox/rehype-prism');
const emoji = require('remark-emoji');
const slug = require('rehype-slug');
const matter = require('gray-matter');
const stringifyObject = require('stringify-object');
const linkHeadings = require('./linkHeadings');
const rightToc = require('./rightToc');
@@ -19,9 +21,10 @@ const DEFAULT_OPTIONS = {
prismTheme: 'prism-themes/themes/prism-atom-dark.css',
};
module.exports = async function(content) {
module.exports = async function(fileString) {
const callback = this.async();
const {data, content} = matter(fileString);
const options = Object.assign(DEFAULT_OPTIONS, getOptions(this), {
filepath: this.resourcePath,
});
@@ -43,6 +46,7 @@ module.exports = async function(content) {
import React from 'react';
import { mdx } from '@mdx-js/react';
${importStr}
export const frontMatter = ${stringifyObject(data)};
${result}
`;
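
To make the new export concrete — a rough sketch only, with invented front matter values: a post whose front matter sets `title: Hello` and `author: Jane Doe` would now be compiled by the loader into a module that begins roughly like

import React from 'react';
import { mdx } from '@mdx-js/react';

export const frontMatter = {
  title: 'Hello',
  author: 'Jane Doe'
};

// ...followed by the MDXContent component generated by @mdx-js/mdx

so consumers can both render the default export and read the front matter via a named import.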


@@ -12,7 +12,6 @@
"@docusaurus/utils": "^2.0.0-alpha.13",
"fs-extra": "^7.0.1",
"globby": "^9.1.0",
"gray-matter": "^4.0.2",
"loader-utils": "^1.2.3"
},
"peerDependencies": {


@@ -23,7 +23,7 @@ const DEFAULT_OPTIONS = {
path: 'blog', // Path to data on filesystem, relative to site dir.
routeBasePath: 'blog', // URL Route.
include: ['*.md', '*.mdx'], // Extensions to include.
pageCount: 10, // How many entries per page.
postsPerPage: 10, // How many posts per page.
blogListComponent: '@theme/BlogListPage',
blogPostComponent: '@theme/BlogPostPage',
};
@@ -47,9 +47,9 @@ class DocusaurusPluginContentBlog {
return [...globPattern];
}
// Fetches blog contents and returns metadata for the contents.
// Fetches blog contents and returns metadata for the necessary routes.
async loadContent() {
const {pageCount, include, routeBasePath} = this.options;
const {postsPerPage, include, routeBasePath} = this.options;
const {siteConfig} = this.context;
const blogDir = this.contentPath;
@@ -58,8 +58,7 @@ class DocusaurusPluginContentBlog {
cwd: blogDir,
});
// Prepare metadata container.
const blogMetadata = [];
const blogPosts = [];
await Promise.all(
blogFiles.map(async relativeSource => {
@@ -75,82 +74,141 @@ class DocusaurusPluginContentBlog {
);
const fileString = await fs.readFile(source, 'utf-8');
const {metadata: rawMetadata, excerpt: description} = parse(fileString);
const {frontMatter, excerpt} = parse(fileString);
const metadata = {
permalink: normalizeUrl([
baseUrl,
routeBasePath,
fileToUrl(blogFileName),
]),
source,
description,
...rawMetadata,
date,
};
blogMetadata.push(metadata);
blogPosts.push({
id: blogFileName,
metadata: {
permalink: normalizeUrl([
baseUrl,
routeBasePath,
fileToUrl(blogFileName),
]),
source,
description: frontMatter.description || excerpt,
date,
title: frontMatter.title || blogFileName,
},
});
}),
);
blogMetadata.sort((a, b) => b.date - a.date);
blogPosts.sort((a, b) => b.metadata.date - a.metadata.date);
// Blog page handling. Example: `/blog`, `/blog/page1`, `/blog/page2`
const numOfBlog = blogMetadata.length;
const numberOfPage = Math.ceil(numOfBlog / pageCount);
// Blog pagination routes.
// Example: `/blog`, `/blog/page/1`, `/blog/page/2`
const totalCount = blogPosts.length;
const numberOfPages = Math.ceil(totalCount / postsPerPage);
const basePageUrl = normalizeUrl([baseUrl, routeBasePath]);
// eslint-disable-next-line
for (let page = 0; page < numberOfPage; page++) {
blogMetadata.push({
permalink:
page > 0
? normalizeUrl([basePageUrl, `page/${page + 1}`])
: basePageUrl,
isBlogPage: true,
posts: blogMetadata.slice(page * pageCount, (page + 1) * pageCount),
const blogListPaginated = [];
function blogPaginationPermalink(page) {
return page > 0
? normalizeUrl([basePageUrl, `page/${page + 1}`])
: basePageUrl;
}
for (let page = 0; page < numberOfPages; page += 1) {
blogListPaginated.push({
metadata: {
permalink: blogPaginationPermalink(page),
page: page + 1,
postsPerPage,
totalPages: numberOfPages,
totalCount,
previousPage: page !== 0 ? blogPaginationPermalink(page - 1) : null,
nextPage:
page < numberOfPages - 1 ? blogPaginationPermalink(page + 1) : null,
},
items: blogPosts
.slice(page * postsPerPage, (page + 1) * postsPerPage)
.map(item => item.id),
});
}
return blogMetadata;
return {
blogPosts,
blogListPaginated,
};
}
async contentLoaded({content, actions}) {
async contentLoaded({content: blogContents, actions}) {
const {blogListComponent, blogPostComponent} = this.options;
const {addRoute, createData} = actions;
await Promise.all(
content.map(async metadataItem => {
const {isBlogPage, permalink} = metadataItem;
const {blogPosts, blogListPaginated} = blogContents;
const blogItemsToModules = {};
// Create routes for blog entries.
const blogItems = await Promise.all(
blogPosts.map(async blogPost => {
const {id, metadata} = blogPost;
const {permalink} = metadata;
const metadataPath = await createData(
`${docuHash(permalink)}.json`,
JSON.stringify(metadataItem, null, 2),
JSON.stringify(metadata, null, 2),
);
if (isBlogPage) {
addRoute({
path: permalink,
component: blogListComponent,
exact: true,
modules: {
entries: metadataItem.posts.map(post => ({
// To tell routes.js this is an import and not a nested object to recurse.
__import: true,
path: post.source,
query: {
truncated: true,
},
})),
metadata: metadataPath,
},
});
const temp = {
metadata,
metadataPath,
};
return;
}
blogItemsToModules[id] = temp;
return temp;
}),
);
blogItems.forEach((blogItem, index) => {
const prevItem = index > 0 ? blogItems[index - 1] : null;
const nextItem =
index < blogItems.length - 1 ? blogItems[index + 1] : null;
const {metadata, metadataPath} = blogItem;
const {source, permalink} = metadata;
addRoute({
path: permalink,
component: blogPostComponent,
exact: true,
modules: {
content: source,
metadata: metadataPath,
prevItem: prevItem && prevItem.metadataPath,
nextItem: nextItem && nextItem.metadataPath,
},
});
});
// Create routes for blog's paginated list entries.
await Promise.all(
blogListPaginated.map(async listPage => {
const {metadata, items} = listPage;
const {permalink} = metadata;
const pageMetadataPath = await createData(
`${docuHash(permalink)}.json`,
JSON.stringify(metadata, null, 2),
);
addRoute({
path: permalink,
component: blogPostComponent,
component: blogListComponent,
exact: true,
modules: {
content: metadataItem.source,
metadata: metadataPath,
items: items.map(postID => {
const {metadata: postMetadata, metadataPath} = blogItemsToModules[
postID
];
// To tell routes.js this is an import and not a nested object to recurse.
return {
content: {
__import: true,
path: postMetadata.source,
query: {
truncated: true,
},
},
metadata: metadataPath,
};
}),
metadata: pageMetadataPath,
},
});
}),
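
To illustrate the new pagination data — a sketch with assumed numbers only (five posts, `postsPerPage: 2`, default `/blog` base URL): loadContent would return three paginated list entries, the first of which looks roughly like

{
  metadata: {
    permalink: '/blog',
    page: 1,
    postsPerPage: 2,
    totalPages: 3, // Math.ceil(5 / 2)
    totalCount: 5,
    previousPage: null,
    nextPage: '/blog/page/2',
  },
  // ids of the two newest posts; these file names are invented
  items: ['2019-05-13-welcome', '2019-05-10-hello-world'],
}

The later entries get `/blog/page/2` and `/blog/page/3` as permalinks, with `previousPage`/`nextPage` chained accordingly.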


@@ -5,7 +5,6 @@
* LICENSE file in the root directory of this source tree.
*/
const matter = require('gray-matter');
const {parseQuery} = require('loader-utils');
const TRUNCATE_MARKER = /<!--\s*truncate\s*-->/;
@@ -13,22 +12,13 @@ const TRUNCATE_MARKER = /<!--\s*truncate\s*-->/;
module.exports = async function(fileString) {
const callback = this.async();
// Extract content of markdown (without frontmatter).
let {content} = matter(fileString);
let finalContent = fileString;
// Truncate content if requested (e.g: file.md?truncated=true)
const {truncated} = this.resourceQuery && parseQuery(this.resourceQuery);
if (truncated) {
if (TRUNCATE_MARKER.test(content)) {
// eslint-disable-next-line
content = content.split(TRUNCATE_MARKER)[0];
} else {
// Return first 4 lines of the content as summary
content = content
.split('\n')
.slice(0, 4)
.join('\n');
}
if (truncated && TRUNCATE_MARKER.test(fileString)) {
// eslint-disable-next-line
finalContent = fileString.split(TRUNCATE_MARKER)[0];
}
return callback(null, content);
return callback(null, finalContent);
};
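
A minimal sketch of the changed truncation behaviour (the sample string is invented):

const TRUNCATE_MARKER = /<!--\s*truncate\s*-->/;
const post = 'Intro paragraph.\n\n<!--truncate-->\n\nThe rest of the post.';

// With ?truncated=true and a marker present, only the part before the marker is returned:
post.split(TRUNCATE_MARKER)[0]; // => 'Intro paragraph.\n\n'

// Without a marker, the full string now passes through unchanged;
// previously it was cut down to the first four lines.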


@@ -9,25 +9,31 @@ import React from 'react';
import Layout from '@theme/Layout'; // eslint-disable-line
import BlogPostItem from '@theme/BlogPostItem';
import BlogListPaginator from '@theme/BlogListPaginator';
function BlogListPage(props) {
const {
metadata: {posts = []},
entries: BlogPosts,
} = props;
const {metadata, items} = props;
return (
<Layout title="Blog" description="Blog">
<div className="container margin-vert--xl">
<div className="row">
<div className="col col--6 col--offset-3">
{BlogPosts.map((PostContent, index) => (
<div className="margin-bottom--xl" key={index}>
<BlogPostItem truncated metadata={posts[index]}>
<PostContent />
</BlogPostItem>
</div>
))}
<div className="col col--8 col--offset-2">
{items.map(
({content: BlogPostContent, metadata: blogPostMetadata}) => (
<div
className="margin-bottom--xl"
key={blogPostMetadata.permalink}>
<BlogPostItem
frontMatter={BlogPostContent.frontMatter}
metadata={blogPostMetadata}
truncated>
<BlogPostContent />
</BlogPostItem>
</div>
),
)}
<BlogListPaginator metadata={metadata} />
</div>
</div>
</div>
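
For reference, a rough sketch of the props this page receives once routes.js has resolved the `__import` entries (the component and metadata values are invented):

const props = {
  metadata: {permalink: '/blog', page: 1, totalPages: 3, totalCount: 5, previousPage: null, nextPage: '/blog/page/2'},
  items: [
    // one entry per post on this page, newest first
    {content: WelcomePostContent, metadata: {permalink: '/blog/welcome', title: 'Welcome', date: '2019-05-13', description: '...'}},
  ],
};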


@@ -0,0 +1,35 @@
/**
* Copyright (c) 2017-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import React from 'react';
import Link from '@docusaurus/Link';
function BlogListPaginator(props) {
const {metadata} = props;
const {previousPage, nextPage} = metadata;
return (
<div className="row">
<div className="col col--6">
{previousPage && (
<Link className="button button--secondary" to={previousPage}>
Newer entries
</Link>
)}
</div>
<div className="col col--6 text--right">
{nextPage && (
<Link className="button button--secondary" to={nextPage}>
Older entries
</Link>
)}
</div>
</div>
);
}
export default BlogListPaginator;
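
Usage sketch (URLs are placeholders): the blog list page passes the pagination metadata straight through, e.g.

<BlogListPaginator metadata={{previousPage: null, nextPage: '/blog/page/2'}} />

which on the first page renders only the "Older entries" link.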


@@ -9,20 +9,11 @@ import React from 'react';
import Link from '@docusaurus/Link';
function BlogPostItem(props) {
const {metadata, children, truncated} = props;
const {children, frontMatter, metadata, truncated} = props;
const renderPostHeader = () => {
if (!metadata) {
return null;
}
const {
date,
author,
authorURL,
authorTitle,
authorFBID,
permalink,
title,
} = metadata;
const {author, authorURL, authorTitle, authorFBID, title} = frontMatter;
const {date, permalink} = metadata;
const blogPostDate = new Date(date);
const month = [
@@ -39,9 +30,10 @@ function BlogPostItem(props) {
'November',
'December',
];
const authorImageURL = authorFBID
? `https://graph.facebook.com/${authorFBID}/picture/?height=200&width=200`
: metadata.authorImageURL;
: frontMatter.authorImageURL;
return (
<header>
@@ -88,10 +80,8 @@ function BlogPostItem(props) {
{renderPostHeader()}
<article className="markdown">{children}</article>
{truncated && (
<div className="text--right">
<Link className="button button--secondary" to={metadata.permalink}>
Read More
</Link>
<div className="text--right margin-vert--md">
<Link to={metadata.permalink}>Read More</Link>
</div>
)}
</div>
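
To make the prop split concrete (sample values only): author details now come from the post's front matter, while the generated metadata keeps the date and permalink, e.g.

<BlogPostItem
  frontMatter={{title: 'Welcome', author: 'Jane Doe', authorURL: 'https://example.com/jane'}}
  metadata={{date: '2019-05-13', permalink: '/blog/welcome'}}
  truncated>
  {/* post content */}
</BlogPostItem>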


@@ -9,19 +9,23 @@ import React from 'react';
import Layout from '@theme/Layout'; // eslint-disable-line
import BlogPostItem from '@theme/BlogPostItem';
import BlogPostPaginator from '../BlogPostPaginator';
function BlogPostPage(props) {
const {content: BlogPostContents, metadata} = props;
const {content: BlogPostContents, metadata, nextItem, prevItem} = props;
const {frontMatter} = BlogPostContents;
return (
<Layout title={metadata.title} description={metadata.description}>
{BlogPostContents && (
<div className="container margin-vert--xl">
<div className="row">
<div className="col col--6 col--offset-3">
<BlogPostItem metadata={metadata}>
<div className="col col--8 col--offset-2">
<BlogPostItem frontMatter={frontMatter} metadata={metadata}>
<BlogPostContents />
</BlogPostItem>
<div className="margin-vert--lg">
<BlogPostPaginator nextItem={nextItem} prevItem={prevItem} />
</div>
</div>
</div>
</div>


@@ -0,0 +1,34 @@
/**
* Copyright (c) 2017-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import React from 'react';
import Link from '@docusaurus/Link';
function BlogPostPaginator(props) {
const {nextItem, prevItem} = props;
return (
<div className="row">
<div className="col col--6">
{prevItem && (
<Link className="button button--secondary" to={prevItem.permalink}>
{prevItem.title}
</Link>
)}
</div>
<div className="col col--6 text--right">
{nextItem && (
<Link className="button button--secondary" to={nextItem.permalink}>
{nextItem.title}
</Link>
)}
</div>
</div>
);
}
export default BlogPostPaginator;
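
Usage sketch (titles and permalinks invented): the post page hands over the neighbouring posts' metadata, so the paginator only needs `title` and `permalink` from each item:

<BlogPostPaginator
  prevItem={{title: 'A newer post', permalink: '/blog/a-newer-post'}}
  nextItem={{title: 'An older post', permalink: '/blog/an-older-post'}}
/>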


@@ -13,7 +13,6 @@
"@docusaurus/utils": "^2.0.0-alpha.13",
"fs-extra": "^7.0.1",
"globby": "^9.1.0",
"gray-matter": "^4.0.2",
"import-fresh": "^3.0.0",
"loader-utils": "^1.2.3"
},


@@ -5,7 +5,6 @@
* LICENSE file in the root directory of this source tree.
*/
const matter = require('gray-matter');
const {getOptions} = require('loader-utils');
const {resolve} = require('url');
@@ -16,9 +15,6 @@ module.exports = async function(fileString) {
});
const {docsDir, sourceToPermalink} = options;
// Extract content of markdown (without frontmatter).
let {content} = matter(fileString);
// Determine the source dir. e.g: /docs, /website/versioned_docs/version-1.0.0
let sourceDir;
const thisSource = this.resourcePath;
@@ -26,6 +22,8 @@ module.exports = async function(fileString) {
sourceDir = docsDir;
}
let content = fileString;
// Replace internal markdown linking (except in fenced blocks).
if (sourceDir) {
let fencedBlock = false;


@@ -18,7 +18,7 @@ module.exports = async function processMetadata(
) {
const filepath = path.resolve(refDir, source);
const fileString = await fs.readFile(filepath, 'utf-8');
const {metadata = {}, excerpt} = parse(fileString);
const {frontMatter: metadata = {}, excerpt} = parse(fileString);
// Default id is the file name.
if (!metadata.id) {


@@ -166,7 +166,7 @@ function getSubFolder(file, refDir) {
* @returns {Object}
*/
function parse(fileString) {
const {data: metadata, content, excerpt} = matter(fileString, {
const {data: frontMatter, content, excerpt} = matter(fileString, {
excerpt(file) {
// eslint-disable-next-line no-param-reassign
file.excerpt = file.content
@@ -175,7 +175,7 @@ function parse(fileString) {
.shift();
},
});
return {metadata, content, excerpt};
return {frontMatter, content, excerpt};
}
/**
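
A rough sketch of what parse() now returns (sample input; the excerpt extraction is mostly elided by this hunk, so the excerpt value below is approximate):

const {frontMatter, content, excerpt} = parse(
  '---\ntitle: Hello\n---\n\nFirst line of the post.\n\nMore text.',
);
// frontMatter => {title: 'Hello'}
// content     => '\nFirst line of the post.\n\nMore text.'
// excerpt     => roughly the first content line, e.g. 'First line of the post.'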


@@ -28,7 +28,7 @@ function ComponentCreator(path) {
/* Prepare opts data that react-loadable needs
https://github.com/jamiebuilds/react-loadable#declaring-which-modules-are-being-loaded
Example:
- optsLoader:
- optsLoader:
{
component: () => import('./Pages.js'),
content.foo: () => import('./doc1.md'),
@@ -44,6 +44,10 @@ function ComponentCreator(path) {
return;
}
if (target == null) {
return;
}
if (typeof target === 'object') {
Object.keys(target).forEach(key => {
traverseChunk(target[key], [...keys, key]);


@@ -49,6 +49,10 @@ async function loadRoutes(pluginsRouteConfigs) {
// Given an input (object or string), get the import path str
const getModulePath = target => {
if (!target) {
return null;
}
const importStr = _.isObject(target) ? target.path : target;
const queryStr = target.query ? `?${stringify(target.query)}` : '';
return `${importStr}${queryStr}`;
@@ -57,9 +61,14 @@ async function loadRoutes(pluginsRouteConfigs) {
if (!component) {
throw new Error(`path: ${routePath} need a component`);
}
const componentPath = getModulePath(component);
const genImportChunk = (modulePath, prefix, name) => {
if (!modulePath) {
return null;
}
const chunkName = genChunkName(modulePath, prefix, name);
const finalStr = JSON.stringify(modulePath);
return {
@@ -90,6 +99,11 @@ async function loadRoutes(pluginsRouteConfigs) {
prefix,
routePath,
);
if (!importChunk) {
return null;
}
registry[importChunk.chunkName] = {
importStatement: importChunk.importStatement,
modulePath: importChunk.modulePath,
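
A small sketch of the null handling added here (module paths invented): getModulePath builds an import path plus optional query string, and now returns null for absent modules such as a post with no prevItem, so no chunk is generated for them:

getModulePath({__import: true, path: 'blog/welcome.md', query: {truncated: true}});
// => 'blog/welcome.md?truncated=true'

getModulePath('blog/welcome.md'); // => 'blog/welcome.md'
getModulePath(null);              // => null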


@@ -11,6 +11,8 @@ authorTwitter: JoelMarcey
Docusaurus [went live](https://docusaurus.io/blog/2017/12/14/introducing-docusaurus) on December 14, 2017. At the time, we had [8 early adopters](https://docusaurus.io/blog/2017/12/14/introducing-docusaurus#acknowledgements).
<!--truncate-->
We now have nearly [60 known users of Docusaurus](https://docusaurus.io/en/users), and probably more that we don't know about. We have [9K GitHub stars](https://github.com/facebook/docusaurus) and an active community, particularly [Yangshun Tay](https://twitter.com/yangshunz) and [Endilie Yacop Sucipto](https://twitter.com/endiliey), both of whom are the lead maintainers helping keep this project [moving forward](https://docusaurus.io/blog/2018/09/11/Towards-Docusaurus-2).
Thank you to everyone for your support and use of this project! I am super proud of how far this project has come in just a year.


@@ -36,6 +36,7 @@ module.exports = {
},
blog: {
path: '../website-1.x/blog',
postsPerPage: 3,
},
},
],