feat(v2): Implement plugin creating feed for blog posts (#1916)

* feat(v2): Implement feed for blog posts

Fixes: #1698

Test plan:
- added tests

Ran `yarn build` on the website with the following config (and with the blog disabled in preset-classic):

```js
[
  '@docusaurus/plugin-content-blog',
  {
    path: '../website-1.x/blog',
    feedOptions: {
      copyright: 'Copy',
      type: 'atom',
    },
  },
],
```
which generated the following feed:
```xml
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
    <id>https://v2.docusaurus.io/blog</id>
    <title>Docusaurus Blog</title>
    <updated>2018-12-14T00:00:00.000Z</updated>
    <generator>https://github.com/jpmonette/feed</generator>
    <link rel="alternate" href="https://v2.docusaurus.io/blog"/>
    <subtitle>Docusaurus Blog</subtitle>
    <icon>https://v2.docusaurus.io/img/docusaurus.ico</icon>
    <rights>Copy</rights>
    <entry>
        <title type="html"><![CDATA[Happy 1st Birthday Slash!]]></title>
        <id>Happy 1st Birthday Slash!</id>
        <link href="https://v2.docusaurus.io/blog/2018/12/14/Happy-First-Birthday-Slash"/>
        <updated>2018-12-14T00:00:00.000Z</updated>
        <summary type="html"><![CDATA[![First Birthday Slash](/img/docusaurus-slash-first-birthday.svg)]]></summary>
    </entry>
    <entry>
        <title type="html"><![CDATA[Towards Docusaurus 2]]></title>
        <id>Towards Docusaurus 2</id>
        <link href="https://v2.docusaurus.io/blog/2018/09/11/Towards-Docusaurus-2"/>
        <updated>2018-09-11T00:00:00.000Z</updated>
        <summary type="html"><![CDATA[Docusaurus was [officially announced](https://docusaurus.io/blog/2017/12/14/introducing-docusaurus) over nine months ago as a way to easily build open source documentation websites. Since then, it has amassed over 8,600 GitHub Stars, and is used by many popular open source projects such as [React Native](https://facebook.github.io/react-native/), [Babel](https://babeljs.io/), [Jest](https://jestjs.io/), [Reason](https://reasonml.github.io/) and [Prettier](https://prettier.io/).]]></summary>
    </entry>
    <entry>
        <title type="html"><![CDATA[How I Converted Profilo to Docusaurus in Under 2 Hours]]></title>
        <id>How I Converted Profilo to Docusaurus in Under 2 Hours</id>
        <link href="https://v2.docusaurus.io/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus"/>
        <updated>2018-04-30T00:00:00.000Z</updated>
        <summary type="html"><![CDATA[> _“Joel and I were discussing having a website and how it would have been great to launch with it. So I challenged myself to add Docusaurus support. It took just over an hour and a half. I'm going to send you a PR with the addition so you can take a look and see if you like it. Your workflow for adding docs wouldn't be much different from editing those markdown files.”_]]></summary>
    </entry>
    <entry>
        <title type="html"><![CDATA[Introducing Docusaurus]]></title>
        <id>Introducing Docusaurus</id>
        <link href="https://v2.docusaurus.io/blog/2017/12/14/introducing-docusaurus"/>
        <updated>2017-12-14T00:00:00.000Z</updated>
        <summary type="html"><![CDATA[![Introducing Slash](/img/slash-introducing.svg)]]></summary>
    </entry>
</feed>
```

* New feedOptions type 'all', and write feeds to the correct path
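
For reference, a minimal sketch of a config that exercises the new `'all'` option (the copyright string is just a placeholder, as in the test config above):

```js
[
  '@docusaurus/plugin-content-blog',
  {
    feedOptions: {
      // 'all' emits both feed formats instead of a single one
      type: 'all',
      copyright: 'Copy',
    },
  },
],
```

Per the `postBuild` hook in the diff below, each feed is written to `<outDir>/<routeBasePath>/<type>.xml`, so `'all'` should produce both `rss.xml` and `atom.xml` under the blog route.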
Authored by Pawel Kadluczka on 2019-11-05 23:45:31 -08:00, committed by Endi
parent c507028cb0
commit ff83e6f8bc
12 changed files with 405 additions and 84 deletions

@@ -5,16 +5,13 @@
* LICENSE file in the root directory of this source tree.
*/
import fs from 'fs-extra';
import globby from 'globby';
import _ from 'lodash';
import path from 'path';
import {parse, normalizeUrl, docuHash} from '@docusaurus/utils';
import {normalizeUrl, docuHash} from '@docusaurus/utils';
import {
DateLink,
PluginOptions,
BlogTags,
BlogPost,
Tag,
BlogContent,
BlogItemsToModules,
@@ -24,19 +21,10 @@ import {
LoadContext,
PluginContentLoadedActions,
ConfigureWebpackUtils,
Props,
} from '@docusaurus/types';
import {Configuration} from 'webpack';
// YYYY-MM-DD-{name}.mdx?
// prefer named capture groups, but old Node versions do not support them
const FILENAME_PATTERN = /^(\d{4}-\d{1,2}-\d{1,2})-?(.*?).mdx?$/;
function toUrl({date, link}: DateLink) {
return `${date
.toISOString()
.substring(0, '2019-01-01'.length)
.replace(/-/g, '/')}/${link}`;
}
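// e.g. toUrl({date: new Date('2018-12-14'), link: 'Happy-First-Birthday-Slash'})
//      -> '2018/12/14/Happy-First-Birthday-Slash'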
import {generateBlogFeed, generateBlogPosts} from './blogUtils';
const DEFAULT_OPTIONS: PluginOptions = {
path: 'blog', // Path to data on filesystem, relative to site dir.
@@ -74,69 +62,13 @@ export default function pluginContentBlog(
// Fetches blog contents and returns metadata for the necessary routes.
async loadContent() {
const {postsPerPage, include, routeBasePath} = options;
const {siteConfig, siteDir} = context;
const blogDir = contentPath;
const {postsPerPage, routeBasePath} = options;
if (!fs.existsSync(blogDir)) {
const blogPosts = await generateBlogPosts(contentPath, context, options);
if (!blogPosts) {
return null;
}
const {baseUrl = ''} = siteConfig;
const blogFiles = await globby(include, {
cwd: blogDir,
});
const blogPosts: BlogPost[] = [];
await Promise.all(
blogFiles.map(async (relativeSource: string) => {
// Cannot use path.join() as it resolves '../' and removes the '@site'. Let webpack loader resolve it.
const source = path.join(blogDir, relativeSource);
const aliasedSource = `@site/${path.relative(siteDir, source)}`;
const blogFileName = path.basename(relativeSource);
const fileString = await fs.readFile(source, 'utf-8');
const {frontMatter, excerpt} = parse(fileString);
let date;
// extract date and title from filename
const match = blogFileName.match(FILENAME_PATTERN);
let linkName = blogFileName.replace(/\.mdx?$/, '');
if (match) {
const [, dateString, name] = match;
date = new Date(dateString);
linkName = name;
}
// prefer user-defined date
if (frontMatter.date) {
date = new Date(frontMatter.date);
}
// otherwise use the file creation time
date = date || (await fs.stat(source)).birthtime;
frontMatter.title = frontMatter.title || linkName;
blogPosts.push({
id: frontMatter.id || frontMatter.title,
metadata: {
permalink: normalizeUrl([
baseUrl,
routeBasePath,
frontMatter.id || toUrl({date, link: linkName}),
]),
source: aliasedSource,
description: frontMatter.description || excerpt,
date,
tags: frontMatter.tags,
title: frontMatter.title,
},
});
}),
);
blogPosts.sort(
(a, b) => b.metadata.date.getTime() - a.metadata.date.getTime(),
);
// Colocate next and prev metadata
blogPosts.forEach((blogPost, index) => {
const prevItem = index > 0 ? blogPosts[index - 1] : null;
@@ -160,6 +92,9 @@ export default function pluginContentBlog(
// Example: `/blog`, `/blog/page/1`, `/blog/page/2`
const totalCount = blogPosts.length;
const numberOfPages = Math.ceil(totalCount / postsPerPage);
const {
siteConfig: {baseUrl = ''},
} = context;
const basePageUrl = normalizeUrl([baseUrl, routeBasePath]);
const blogListPaginated = [];
@@ -442,5 +377,41 @@ export default function pluginContentBlog(
},
};
},
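// Runs after the static site is built: generate the blog feed and write it to the output dir.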
async postBuild({outDir}: Props) {
if (!options.feedOptions) {
return;
}
const {
feedOptions: {type: feedType},
} = options;
const feed = await generateBlogFeed(context, options);
if (!feed) {
return;
}
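// 'all' expands to both feed formats; otherwise only the configured one is written.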
let feedTypes = [];
if (feedType === 'all') {
feedTypes = ['rss', 'atom'];
} else {
feedTypes.push(feedType);
}
await Promise.all(
feedTypes.map(feedType => {
const feedPath = path.join(
outDir,
options.routeBasePath,
`${feedType}.xml`,
);
const feedContent = feedType === 'rss' ? feed.rss2() : feed.atom1();
return fs.writeFile(feedPath, feedContent, err => {
if (err) {
throw new Error(`Generating ${feedType} feed failed: ${err}`);
}
});
}),
);
},
};
}