Mirror of https://github.com/facebook/docusaurus.git (synced 2025-05-18 11:36:53 +02:00)
feat(v2): Implement plugin creating feed for blog posts (#1916)
* feat(v2): Implement feed for blog posts

Fixes: #1698

Test plan:

- added tests

Ran `yarn build` on website with the following config (and disabled blog from preset-classic):

```js
[
  '@docusaurus/plugin-content-blog',
  {
    path: '../website-1.x/blog',
    feedOptions: {
      copyright: 'Copy',
      type: 'atom',
    },
  },
],
```

which generated the following feed:

```xml
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
    <id>https://v2.docusaurus.io/blog</id>
    <title>Docusaurus Blog</title>
    <updated>2018-12-14T00:00:00.000Z</updated>
    <generator>https://github.com/jpmonette/feed</generator>
    <link rel="alternate" href="https://v2.docusaurus.io/blog"/>
    <subtitle>Docusaurus Blog</subtitle>
    <icon>https://v2.docusaurus.io/img/docusaurus.ico</icon>
    <rights>Copy</rights>
    <entry>
        <title type="html"><![CDATA[Happy 1st Birthday Slash!]]></title>
        <id>Happy 1st Birthday Slash!</id>
        <link href="https://v2.docusaurus.io/blog/2018/12/14/Happy-First-Birthday-Slash"/>
        <updated>2018-12-14T00:00:00.000Z</updated>
        <summary type="html"><![CDATA[…]]></summary>
    </entry>
    <entry>
        <title type="html"><![CDATA[Towards Docusaurus 2]]></title>
        <id>Towards Docusaurus 2</id>
        <link href="https://v2.docusaurus.io/blog/2018/09/11/Towards-Docusaurus-2"/>
        <updated>2018-09-11T00:00:00.000Z</updated>
        <summary type="html"><![CDATA[… over nine months ago as a way to easily build open source documentation websites. Since then, it has amassed over 8,600 GitHub Stars, and is used by many popular open source projects such as [React Native](https://facebook.github.io/react-native/), [Babel](https://babeljs.io/), [Jest](https://jestjs.io/), [Reason](https://reasonml.github.io/) and [Prettier](https://prettier.io/).]]></summary>
    </entry>
    <entry>
        <title type="html"><![CDATA[How I Converted Profilo to Docusaurus in Under 2 Hours]]></title>
        <id>How I Converted Profilo to Docusaurus in Under 2 Hours</id>
        <link href="https://v2.docusaurus.io/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus"/>
        <updated>2018-04-30T00:00:00.000Z</updated>
        <summary type="html"><![CDATA[> _“Joel and I were discussing having a website and how it would have been great to launch with it. So I challenged myself to add Docusaurus support. It took just over an hour and a half. I'm going to send you a PR with the addition so you can take a look and see if you like it. Your workflow for adding docs wouldn't be much different from editing those markdown files.”_]]></summary>
    </entry>
    <entry>
        <title type="html"><![CDATA[Introducing Docusaurus]]></title>
        <id>Introducing Docusaurus</id>
        <link href="https://v2.docusaurus.io/blog/2017/12/14/introducing-docusaurus"/>
        <updated>2017-12-14T00:00:00.000Z</updated>
        <summary type="html"><![CDATA[…]]></summary>
    </entry>
</feed>
```

* new feedOptions type 'all' and use correct path
This commit is contained in:
parent c507028cb0
commit ff83e6f8bc

12 changed files with 405 additions and 84 deletions
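For reference, a minimal sketch of how the new `feedOptions` — including the `'all'` type added in the follow-up commit — can be enabled through `@docusaurus/preset-classic`. It mirrors the example added to the blog docs further down in this diff; with `type: 'all'`, the `postBuild` hook below writes both `rss.xml` and `atom.xml` under the blog route.

```js
// docusaurus.config.js — illustrative only; option names follow the docs added in this commit.
module.exports = {
  presets: [
    [
      '@docusaurus/preset-classic',
      {
        blog: {
          feedOptions: {
            type: 'all', // 'rss' | 'atom' | 'all' — 'all' emits both rss.xml and atom.xml
            copyright: `Copyright © ${new Date().getFullYear()} Facebook, Inc.`,
          },
        },
      },
    ],
  ],
};
```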
@@ -2,6 +2,7 @@

## Unreleased

- Add feed for blog posts.
- **HOTFIX for 2.0.0-alpha.32** - Fix build compilation if only one code tab exists.
- Add table of contents highlighting on scroll.
- **BREAKING** `prismTheme` is renamed to `theme` as part of the new `prism` object in the `themeConfig` field in your `docusaurus.config.js`. E.g.:
@@ -20,7 +21,6 @@

### Features

- Add `<Redirect>` component for easy client side redirect. Example Usage:

```js
import React from 'react';
import {Redirect} from '@docusaurus/router';
@@ -16,6 +16,7 @@

  "dependencies": {
    "@docusaurus/mdx-loader": "^2.0.0-alpha.32",
    "@docusaurus/utils": "^2.0.0-alpha.32",
    "feed": "^4.0.0",
    "fs-extra": "^8.1.0",
    "globby": "^10.0.1",
    "loader-utils": "^1.2.3",
@@ -0,0 +1,62 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`blogFeed atom can show feed without posts 1`] = `null`;

exports[`blogFeed atom shows feed item for each post 1`] = `
"<?xml version=\\"1.0\\" encoding=\\"utf-8\\"?>
<feed xmlns=\\"http://www.w3.org/2005/Atom\\">
    <id>https://docusaurus.io/blog</id>
    <title>Hello Blog</title>
    <updated>2019-01-01T00:00:00.000Z</updated>
    <generator>https://github.com/jpmonette/feed</generator>
    <link rel=\\"alternate\\" href=\\"https://docusaurus.io/blog\\"/>
    <subtitle>Hello Blog</subtitle>
    <icon>https://docusaurus.io/image/favicon.ico</icon>
    <rights>Copyright</rights>
    <entry>
        <title type=\\"html\\"><![CDATA[date-matter]]></title>
        <id>date-matter</id>
        <link href=\\"https://docusaurus.io/blog/2019/01/01/date-matter\\"/>
        <updated>2019-01-01T00:00:00.000Z</updated>
        <summary type=\\"html\\"><![CDATA[date inside front matter]]></summary>
    </entry>
    <entry>
        <title type=\\"html\\"><![CDATA[Happy 1st Birthday Slash!]]></title>
        <id>Happy 1st Birthday Slash!</id>
        <link href=\\"https://docusaurus.io/blog/2018/12/14/Happy-First-Birthday-Slash\\"/>
        <updated>2018-12-14T00:00:00.000Z</updated>
        <summary type=\\"html\\"><![CDATA[pattern name]]></summary>
    </entry>
</feed>"
`;

exports[`blogFeed rss can show feed without posts 1`] = `null`;

exports[`blogFeed rss shows feed item for each post 1`] = `
"<?xml version=\\"1.0\\" encoding=\\"utf-8\\"?>
<rss version=\\"2.0\\">
    <channel>
        <title>Hello Blog</title>
        <link>https://docusaurus.io/blog</link>
        <description>Hello Blog</description>
        <lastBuildDate>Tue, 01 Jan 2019 00:00:00 GMT</lastBuildDate>
        <docs>http://blogs.law.harvard.edu/tech/rss</docs>
        <generator>https://github.com/jpmonette/feed</generator>
        <copyright>Copyright</copyright>
        <item>
            <title><![CDATA[date-matter]]></title>
            <link>https://docusaurus.io/blog/2019/01/01/date-matter</link>
            <guid>https://docusaurus.io/blog/2019/01/01/date-matter</guid>
            <pubDate>Tue, 01 Jan 2019 00:00:00 GMT</pubDate>
            <description><![CDATA[date inside front matter]]></description>
        </item>
        <item>
            <title><![CDATA[Happy 1st Birthday Slash!]]></title>
            <link>https://docusaurus.io/blog/2018/12/14/Happy-First-Birthday-Slash</link>
            <guid>https://docusaurus.io/blog/2018/12/14/Happy-First-Birthday-Slash</guid>
            <pubDate>Fri, 14 Dec 2018 00:00:00 GMT</pubDate>
            <description><![CDATA[pattern name]]></description>
        </item>
    </channel>
</rss>"
`;
@@ -0,0 +1,69 @@
import path from 'path';
import {generateBlogFeed} from '../blogUtils';
import {LoadContext} from '@docusaurus/types';
import {PluginOptions} from '../types';

describe('blogFeed', () => {
  ['atom', 'rss'].forEach(feedType => {
    describe(`${feedType}`, () => {
      test('can show feed without posts', async () => {
        const siteConfig = {
          title: 'Hello',
          baseUrl: '/',
          url: 'https://docusaurus.io',
          favicon: 'image/favicon.ico',
        };

        const feed = await generateBlogFeed(
          {
            siteDir: __dirname,
            siteConfig,
          } as LoadContext,
          {
            path: 'invalid-blog-path',
            routeBasePath: 'blog',
            include: ['*.md', '*.mdx'],
            feedOptions: {
              type: feedType as any,
              copyright: 'Copyright',
            },
          } as PluginOptions,
        );
        const feedContent =
          feed && (feedType === 'rss' ? feed.rss2() : feed.atom1());
        expect(feedContent).toMatchSnapshot();
      });

      test('shows feed item for each post', async () => {
        const siteDir = path.join(__dirname, '__fixtures__', 'website');
        const generatedFilesDir = path.resolve(siteDir, '.docusaurus');
        const siteConfig = {
          title: 'Hello',
          baseUrl: '/',
          url: 'https://docusaurus.io',
          favicon: 'image/favicon.ico',
        };

        const feed = await generateBlogFeed(
          {
            siteDir,
            siteConfig,
            generatedFilesDir,
          } as LoadContext,
          {
            path: 'blog',
            routeBasePath: 'blog',
            include: ['*r*.md', '*.mdx'], // skip no-date.md - it won't play nice with snapshots
            feedOptions: {
              type: feedType as any,
              copyright: 'Copyright',
            },
          } as PluginOptions,
        );
        const feedContent =
          feed && (feedType === 'rss' ? feed.rss2() : feed.atom1());
        expect(feedContent).toMatchSnapshot();
      });
    });
  });
});
packages/docusaurus-plugin-content-blog/src/blogUtils.ts · 147 · Normal file
@@ -0,0 +1,147 @@
import fs from 'fs-extra';
import globby from 'globby';
import path from 'path';
import {Feed} from 'feed';
import {PluginOptions, BlogPost, DateLink} from './types';
import {parse, normalizeUrl} from '@docusaurus/utils';
import {LoadContext} from '@docusaurus/types';

export function truncate(fileString: string, truncateMarker: RegExp | string) {
  const truncated =
    typeof truncateMarker === 'string'
      ? fileString.includes(truncateMarker)
      : truncateMarker.test(fileString);
  return truncated ? fileString.split(truncateMarker)[0] : fileString;
}

// YYYY-MM-DD-{name}.mdx?
// Prefer named capture, but older Node versions do not support it.
const FILENAME_PATTERN = /^(\d{4}-\d{1,2}-\d{1,2})-?(.*?).mdx?$/;

function toUrl({date, link}: DateLink) {
  return `${date
    .toISOString()
    .substring(0, '2019-01-01'.length)
    .replace(/-/g, '/')}/${link}`;
}

export async function generateBlogFeed(
  context: LoadContext,
  options: PluginOptions,
) {
  if (!options.feedOptions) {
    throw new Error(
      'Invalid options - `feedOptions` is not expected to be null.',
    );
  }
  const {siteDir, siteConfig} = context;
  const contentPath = path.resolve(siteDir, options.path);
  const blogPosts = await generateBlogPosts(contentPath, context, options);
  if (blogPosts == null) {
    return null;
  }

  const {feedOptions, routeBasePath} = options;
  const {url: siteUrl, title, favicon} = siteConfig;
  const blogBaseUrl = normalizeUrl([siteUrl, routeBasePath]);

  const updated =
    (blogPosts[0] && blogPosts[0].metadata.date) ||
    new Date('2015-10-25T16:29:00.000-07:00');

  const feed = new Feed({
    id: blogBaseUrl,
    title: feedOptions.title || `${title} Blog`,
    updated,
    language: feedOptions.language,
    link: blogBaseUrl,
    description: feedOptions.description || `${siteConfig.title} Blog`,
    favicon: normalizeUrl([siteUrl, favicon]),
    copyright: feedOptions.copyright,
  });

  blogPosts.forEach(post => {
    const {
      id,
      metadata: {title, permalink, date, description},
    } = post;
    feed.addItem({
      title,
      id: id,
      link: normalizeUrl([siteUrl, permalink]),
      date,
      description,
    });
  });

  return feed;
}

export async function generateBlogPosts(
  blogDir: string,
  {siteConfig, siteDir}: LoadContext,
  options: PluginOptions,
) {
  const {include, routeBasePath} = options;

  if (!fs.existsSync(blogDir)) {
    return null;
  }

  const {baseUrl = ''} = siteConfig;
  const blogFiles = await globby(include, {
    cwd: blogDir,
  });

  const blogPosts: BlogPost[] = [];

  await Promise.all(
    blogFiles.map(async (relativeSource: string) => {
      // Cannot use path.join() as it resolves '../' and removes the '@site'. Let webpack loader resolve it.
      const source = path.join(blogDir, relativeSource);
      const aliasedSource = `@site/${path.relative(siteDir, source)}`;
      const blogFileName = path.basename(relativeSource);

      const fileString = await fs.readFile(source, 'utf-8');
      const {frontMatter, excerpt} = parse(fileString);

      let date;
      // Extract date and title from the filename.
      const match = blogFileName.match(FILENAME_PATTERN);
      let linkName = blogFileName.replace(/\.mdx?$/, '');
      if (match) {
        const [, dateString, name] = match;
        date = new Date(dateString);
        linkName = name;
      }
      // Prefer a user-defined date from front matter.
      if (frontMatter.date) {
        date = new Date(frontMatter.date);
      }
      // Fall back to the file creation time.
      date = date || (await fs.stat(source)).birthtime;
      frontMatter.title = frontMatter.title || linkName;

      blogPosts.push({
        id: frontMatter.id || frontMatter.title,
        metadata: {
          permalink: normalizeUrl([
            baseUrl,
            routeBasePath,
            frontMatter.id || toUrl({date, link: linkName}),
          ]),
          source: aliasedSource,
          description: frontMatter.description || excerpt,
          date,
          tags: frontMatter.tags,
          title: frontMatter.title,
        },
      });
    }),
  );
  blogPosts.sort(
    (a, b) => b.metadata.date.getTime() - a.metadata.date.getTime(),
  );

  return blogPosts;
}
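To make the filename handling above concrete: a small illustrative sketch (not part of the diff) of what `FILENAME_PATTERN` and the `toUrl()` transform produce for one of the post filenames seen in the generated feed; the sample filename and the inlined `url` variable are assumptions that simply mirror the code above.

```js
// Illustration only — mirrors FILENAME_PATTERN and toUrl() from blogUtils.ts above.
const FILENAME_PATTERN = /^(\d{4}-\d{1,2}-\d{1,2})-?(.*?).mdx?$/;

const blogFileName = '2018-12-14-Happy-First-Birthday-Slash.md'; // hypothetical sample
const match = blogFileName.match(FILENAME_PATTERN);
if (match) {
  const [, dateString, name] = match; // '2018-12-14', 'Happy-First-Birthday-Slash'
  const date = new Date(dateString);
  // Same transform as toUrl(): 'YYYY/MM/DD/<name>'
  const url = `${date.toISOString().substring(0, 10).replace(/-/g, '/')}/${name}`;
  console.log(url); // => '2018/12/14/Happy-First-Birthday-Slash'
}
```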
@@ -5,16 +5,13 @@
 * LICENSE file in the root directory of this source tree.
 */
import fs from 'fs-extra';
import globby from 'globby';
import _ from 'lodash';
import path from 'path';
import {parse, normalizeUrl, docuHash} from '@docusaurus/utils';
import {normalizeUrl, docuHash} from '@docusaurus/utils';

import {
  DateLink,
  PluginOptions,
  BlogTags,
  BlogPost,
  Tag,
  BlogContent,
  BlogItemsToModules,
@@ -24,19 +21,10 @@ import {
  LoadContext,
  PluginContentLoadedActions,
  ConfigureWebpackUtils,
  Props,
} from '@docusaurus/types';
import {Configuration} from 'webpack';

// YYYY-MM-DD-{name}.mdx?
// prefer named capture, but old node version do not support
const FILENAME_PATTERN = /^(\d{4}-\d{1,2}-\d{1,2})-?(.*?).mdx?$/;

function toUrl({date, link}: DateLink) {
  return `${date
    .toISOString()
    .substring(0, '2019-01-01'.length)
    .replace(/-/g, '/')}/${link}`;
}
import {generateBlogFeed, generateBlogPosts} from './blogUtils';

const DEFAULT_OPTIONS: PluginOptions = {
  path: 'blog', // Path to data on filesystem, relative to site dir.
@@ -74,69 +62,13 @@ export default function pluginContentBlog(

    // Fetches blog contents and returns metadata for the necessary routes.
    async loadContent() {
      const {postsPerPage, include, routeBasePath} = options;
      const {siteConfig, siteDir} = context;
      const blogDir = contentPath;
      const {postsPerPage, routeBasePath} = options;

      if (!fs.existsSync(blogDir)) {
      const blogPosts = await generateBlogPosts(contentPath, context, options);
      if (!blogPosts) {
        return null;
      }

      const {baseUrl = ''} = siteConfig;
      const blogFiles = await globby(include, {
        cwd: blogDir,
      });

      const blogPosts: BlogPost[] = [];

      await Promise.all(
        blogFiles.map(async (relativeSource: string) => {
          // Cannot use path.join() as it resolves '../' and removes the '@site'. Let webpack loader resolve it.
          const source = path.join(blogDir, relativeSource);
          const aliasedSource = `@site/${path.relative(siteDir, source)}`;
          const blogFileName = path.basename(relativeSource);

          const fileString = await fs.readFile(source, 'utf-8');
          const {frontMatter, excerpt} = parse(fileString);

          let date;
          // extract date and title from filename
          const match = blogFileName.match(FILENAME_PATTERN);
          let linkName = blogFileName.replace(/\.mdx?$/, '');
          if (match) {
            const [, dateString, name] = match;
            date = new Date(dateString);
            linkName = name;
          }
          // prefer usedefined date
          if (frontMatter.date) {
            date = new Date(frontMatter.date);
          }
          // use file create time for blog
          date = date || (await fs.stat(source)).birthtime;
          frontMatter.title = frontMatter.title || linkName;

          blogPosts.push({
            id: frontMatter.id || frontMatter.title,
            metadata: {
              permalink: normalizeUrl([
                baseUrl,
                routeBasePath,
                frontMatter.id || toUrl({date, link: linkName}),
              ]),
              source: aliasedSource,
              description: frontMatter.description || excerpt,
              date,
              tags: frontMatter.tags,
              title: frontMatter.title,
            },
          });
        }),
      );
      blogPosts.sort(
        (a, b) => b.metadata.date.getTime() - a.metadata.date.getTime(),
      );

      // Colocate next and prev metadata
      blogPosts.forEach((blogPost, index) => {
        const prevItem = index > 0 ? blogPosts[index - 1] : null;
@@ -160,6 +92,9 @@ export default function pluginContentBlog(
      // Example: `/blog`, `/blog/page/1`, `/blog/page/2`
      const totalCount = blogPosts.length;
      const numberOfPages = Math.ceil(totalCount / postsPerPage);
      const {
        siteConfig: {baseUrl = ''},
      } = context;
      const basePageUrl = normalizeUrl([baseUrl, routeBasePath]);

      const blogListPaginated = [];
@@ -442,5 +377,41 @@ export default function pluginContentBlog(
        },
      };
    },

    async postBuild({outDir}: Props) {
      if (!options.feedOptions) {
        return;
      }

      const {
        feedOptions: {type: feedType},
      } = options;
      const feed = await generateBlogFeed(context, options);
      if (!feed) {
        return;
      }
      let feedTypes = [];
      if (feedType === 'all') {
        feedTypes = ['rss', 'atom'];
      } else {
        feedTypes.push(feedType);
      }

      await Promise.all(
        feedTypes.map(feedType => {
          const feedPath = path.join(
            outDir,
            options.routeBasePath,
            `${feedType}.xml`,
          );
          const feedContent = feedType === 'rss' ? feed.rss2() : feed.atom1();
          return fs.writeFile(feedPath, feedContent, err => {
            if (err) {
              throw new Error(`Generating ${feedType} feed failed: ${err}`);
            }
          });
        }),
      );
    },
  };
}
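As a quick illustration of where `postBuild` writes the generated feeds (not part of the diff): the path is built from `outDir`, `routeBasePath`, and the feed type, so a default `blog` route ends up with `rss.xml` and `atom.xml` side by side. The build directory below is a made-up example.

```js
// Illustration only — mirrors the path.join() call in postBuild above.
const path = require('path');

const outDir = '/tmp/my-site/build'; // hypothetical build output directory
const routeBasePath = 'blog';
for (const feedType of ['rss', 'atom']) {
  console.log(path.join(outDir, routeBasePath, `${feedType}.xml`));
  // => /tmp/my-site/build/blog/rss.xml
  // => /tmp/my-site/build/blog/atom.xml
}
```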
@@ -7,24 +7,19 @@

const {parseQuery, getOptions} = require('loader-utils');
import {loader} from 'webpack';
import {truncate} from './blogUtils';

export = function(fileString: string) {
  const callback = this.async();

  const {truncateMarker} = getOptions(this);
  const {truncateMarker}: {truncateMarker: RegExp | string} = getOptions(this);

  let finalContent = fileString;

  // Truncate content if requested (e.g: file.md?truncated=true)
  const {truncated} = this.resourceQuery && parseQuery(this.resourceQuery);
  if (
    truncated &&
    (typeof truncateMarker === 'string'
      ? fileString.includes(truncateMarker)
      : truncateMarker.test(fileString))
  ) {
    // eslint-disable-next-line
    finalContent = fileString.split(truncateMarker)[0];
  if (truncated) {
    finalContent = truncate(fileString, truncateMarker);
  }
  return callback && callback(null, finalContent);
} as loader.Loader;
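To show what the shared `truncate()` helper does with the default marker documented further down (illustration only, not part of the diff): the helper body below is copied from `blogUtils.ts` above, and the sample markdown string is invented.

```js
// Illustration only — same logic as blogUtils.truncate(), with the default marker from the plugin docs.
const truncateMarker = /<!--\s*(truncate)\s*-->/;

function truncate(fileString, marker) {
  const truncated =
    typeof marker === 'string' ? fileString.includes(marker) : marker.test(fileString);
  return truncated ? fileString.split(marker)[0] : fileString;
}

const md = 'Intro paragraph.\n\n<!--truncate-->\n\nFull post body.'; // hypothetical post content
console.log(truncate(md, truncateMarker)); // => 'Intro paragraph.\n\n'
```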
@@ -22,6 +22,13 @@ export interface PluginOptions {
  remarkPlugins: string[];
  rehypePlugins: string[];
  truncateMarker: RegExp | string;
  feedOptions?: {
    type: 'rss' | 'atom' | 'all';
    title?: string;
    description?: string;
    copyright: string;
    language?: string;
  };
}

export interface BlogTags {
@@ -93,6 +93,21 @@ module.exports = {
         */
        remarkPlugins: [],
        rehypePlugins: [],
        /**
         * Truncate marker, can be a regex or string.
         */
        truncateMarker: /<!--\s*(truncate)\s*-->/,
        /**
         * Blog feed.
         * If feedOptions is undefined, no rss feed will be generated.
         */
        feedOptions: {
          type: '', // required. 'rss' | 'atom' | 'all'
          title: '', // default to siteConfig.title
          description: '', // default to `${siteConfig.title} Blog`
          copyright: '',
          language: undefined, // possible values: http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
        },
      },
    ],
  ],
@@ -74,6 +74,42 @@ Not this.
Or this.
```

## Feed

You can generate an RSS/Atom feed by passing `feedOptions`.

```ts
feedOptions?: {
  type: 'rss' | 'atom' | 'all';
  title?: string;
  description?: string;
  copyright: string;
  language?: string; // possible values: http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
};
```

Example usage:

```js {9-12}
// docusaurus.config.js
module.exports = {
  // ...
  presets: [
    [
      '@docusaurus/preset-classic',
      {
        blog: {
          feedOptions: {
            type: 'all',
            copyright: `Copyright © ${new Date().getFullYear()} Facebook, Inc.`
          },
        },
      },
    ],
  ],
};
```

## Advanced topics

### Blog-only mode
@@ -40,6 +40,10 @@ module.exports = {
        blog: {
          path: '../website-1.x/blog',
          postsPerPage: 3,
          feedOptions: {
            type: 'all',
            copyright: `Copyright © ${new Date().getFullYear()} Facebook, Inc.`,
          },
        },
        theme: {
          customCss: require.resolve('./src/css/custom.css'),
yarn.lock · 14
@@ -6936,6 +6936,13 @@ feed@^1.1.0:
  dependencies:
    xml "^1.0.1"

feed@^4.0.0:
  version "4.0.0"
  resolved "https://registry.yarnpkg.com/feed/-/feed-4.0.0.tgz#d3c2bfb9aba3c0efacab1ed15be06b67f5c02539"
  integrity sha512-VWtvINgG7cA91BtrGychMvxHj84nc8xS9W/PuAHlY62I8owZtcoxNaKFN+zkGl8tBsaYbxrrp4yB9DhqKNQSPw==
  dependencies:
    xml-js "^1.6.11"

figgy-pudding@^3.4.1, figgy-pudding@^3.5.1:
  version "3.5.1"
  resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.1.tgz#862470112901c727a0e495a80744bd5baa1d6790"
@@ -16515,6 +16522,13 @@ xhr@^2.0.1:
    parse-headers "^2.0.0"
    xtend "^4.0.0"

xml-js@^1.6.11:
  version "1.6.11"
  resolved "https://registry.yarnpkg.com/xml-js/-/xml-js-1.6.11.tgz#927d2f6947f7f1c19a316dd8eea3614e8b18f8e9"
  integrity sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g==
  dependencies:
    sax "^1.2.4"

xml-name-validator@^3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a"