chore: clean up ESLint config, enable a few rules (#6514)

* chore: clean up ESLint config, enable a few rules

* enable max-len for comments

* fix build
This commit is contained in:
Joshua Chen 2022-01-31 10:31:24 +08:00 committed by GitHub
parent b8ccb869f1
commit aa446b7a9c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
167 changed files with 1157 additions and 960 deletions

View file

@@ -8,8 +8,8 @@
/*
We delay a bit the i18n staging deployment
Because sometimes, prod + i18n-staging call this script at the exact same time
And then both try to dl the translations at the same time, and then we have a 409 error
This delay makes sure prod starts to dl the translations in priority
And then both try to dl the translations at the same time, and then we have a
409 error. This delay makes sure prod starts to dl the translations in priority
Used in conjunction with waitForCrowdin.js (which is not enough)
*/

View file

@@ -500,7 +500,8 @@ const config = {
},
{
title: 'Legal',
// Please do not remove the privacy and terms, it's a legal requirement.
// Please don't remove the privacy and terms, it's a legal
// requirement.
items: [
{
label: 'Privacy',

View file

@@ -123,7 +123,8 @@ function ColorGenerator(): JSX.Element {
<input
type="color"
className={styles.colorInput}
// value has to always be a valid color, so baseColor instead of inputColor
// value has to always be a valid color, so baseColor instead of
// inputColor
value={baseColor}
onChange={updateColor}
/>

View file

@@ -25,7 +25,8 @@ declare global {
expect.extend({
toHaveGoodDimensions({width, height}: {width: number; height: number}) {
// Put this one first because aspect ratio is harder to fix than resizing (need to take another screenshot)
// Put this one first because aspect ratio is harder to fix than resizing
// (need to take another screenshot)
if (width / height < 0.5) {
return {
pass: false,

View file

@@ -13,15 +13,16 @@ import {sortBy} from '@site/src/utils/jsUtils';
* ADD YOUR SITE TO THE DOCUSAURUS SHOWCASE:
*
* Requirements for adding your site to our showcase:
* - It is a production-ready site with real content and decent customizations (different from the init templates)
* - It is a production-ready site with real content and decent customizations
* (different from the init templates)
* - It is NOT a work-in-progress with empty pages
* - It has a stable domain name (a Netlify/Vercel deploy preview is not allowed)
* - It has a stable domain (a Netlify/Vercel deploy preview is not allowed)
*
* Instructions:
* - Add your site in the json array below
* - Add a local image preview (decent screenshot of your Docusaurus site)
* - Use relevant tags to qualify your site (read the tag descriptions below)
* - The image MUST be added to the GitHub repository, and use `require("image")`
* - The image MUST be added to the GitHub repository, and use `require("img")`
* - The image has to have minimum width 640 and an aspect of no wider than 2:1
* - Open a PR and check for reported CI errors
*
@@ -115,7 +116,7 @@ export const Tags: Record<TagType, Tag> = {
color: '#fe6829',
},
// Large Docusaurus sites, with a lot of content (> 200 pages, excluding versions)
// Large sites, with a lot of content (> 200 pages, excluding versions)
large: {
label: 'Large',
description:
@@ -1658,7 +1659,7 @@ const Users: User[] = [
preview: require('./showcase/verida-developers.png'),
website: 'https://developers.verida.io/',
source: 'https://github.com/verida/documentation',
tags: ['opensource', 'product'],
tags: ['opensource', 'product'],
},
{
title: 'Vue NodeGui',

View file

@@ -93,9 +93,8 @@ function filterUsers(
}
if (operator === 'AND') {
return selectedTags.every((tag) => user.tags.includes(tag));
} else {
return selectedTags.some((tag) => user.tags.includes(tag));
}
return selectedTags.some((tag) => user.tags.includes(tag));
});
}
@@ -105,7 +104,8 @@ function useFilteredUsers() {
// On SSR / first mount (hydration) no tag is selected
const [selectedTags, setSelectedTags] = useState<TagType[]>([]);
const [searchName, setSearchName] = useState<string | null>(null);
// Sync tags from QS to state (delayed on purpose to avoid SSR/Client hydration mismatch)
// Sync tags from QS to state (delayed on purpose to avoid SSR/Client
// hydration mismatch)
useEffect(() => {
setSelectedTags(readSearchTags(location.search));
setOperator(readOperator(location.search));

View file

@@ -17,7 +17,9 @@ const syncAvatars = require('./syncAvatars');
* the reverse. Therefore, our publish time has a "fake hour" to order them.
*/
const publishTimes = new Set();
/** @type {Record<string, {name: string, url: string, alias: string, imageURL: string}>} */
/**
* @type {Record<string, {name: string, url: string, alias: string, imageURL: string}>}
*/
const authorsMap = {};
/**

View file

@@ -14,12 +14,14 @@ const fs = require('fs-extra');
// TODO not sure how the syncing should be done at all... for now it always
// pretends the limit is reached. We should only fetch a portion of the avatars
// at a time. But seems avatars.githubusercontent.com API doesn't like HTTP requests?
// at a time. But seems avatars.githubusercontent.com API doesn't like HTTP
// requests?
/**
* @param {string} username
* @param {Record<string, number>} lastUpdateCache
* @param {Record<string, {imageURL: string; url: string}>} authorsMap
* @returns true if saved successfully (including not found); false if limited reached
* @returns true if saved successfully (including not found); false if limit
* reached
*/
// eslint-disable-next-line @typescript-eslint/no-unused-vars
async function fetchImage(username, lastUpdateCache, authorsMap) {

View file

@@ -22,7 +22,8 @@ function ChangelogAuthor({author}: Props): JSX.Element {
src={imageURL}
alt={name}
onError={(e) => {
// Image returns 404 if the user's handle changes. We display a fallback instead.
// Image returns 404 if the user's handle changes. We display a
// fallback instead.
e.currentTarget.src =
'data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" width="600" height="600" fill="none" stroke="%2325c2a0" stroke-width="30" version="1.1"><circle cx="300" cy="230" r="115"/><path stroke-linecap="butt" d="M106.81863443903,481.4 a205,205 1 0,1 386.36273112194,0"/></svg>';
}}

View file

@@ -16,7 +16,8 @@ import TOC from '@theme/TOC';
import Link from '@docusaurus/Link';
// This page doesn't change anything. It's just swapping BlogPostItem with our
// own ChangelogItem. We don't want to apply the swizzled item to the actual blog.
// own ChangelogItem. We don't want to apply the swizzled item to the actual
// blog.
function BlogPostPage(props: Props): JSX.Element {
const {content: BlogPostContents, sidebar} = props;
const {assets, metadata} = BlogPostContents;

View file

@@ -8,7 +8,8 @@
import visit from 'unist-util-visit';
/**
* Turns a "```js config-tabs" code block into a "plugin options" and a "preset options" tab
* Turns a "```js config-tabs" code block into a "plugin options" and a "preset
* options" tab
*/
export default function plugin() {
const transformer = (root) => {
@@ -71,7 +72,8 @@ export default function plugin() {
{
type: 'link',
title: null,
// TODO make this version-aware; maybe we need a useVersionedLink() hook
// TODO make this version-aware; maybe we need a
// useVersionedLink() hook
url: '/docs/using-plugins#docusauruspreset-classic',
children: [
{

View file

@@ -27,9 +27,8 @@ export function toggleListItem<T>(list: T[], item: T): T[] {
const itemIndex = list.indexOf(item);
if (itemIndex === -1) {
return list.concat(item);
} else {
const newList = [...list];
newList.splice(itemIndex, 1);
return newList;
}
const newList = [...list];
newList.splice(itemIndex, 1);
return newList;
}

View file

@@ -20,8 +20,8 @@ See also
- https://github.com/facebook/docusaurus/issues/3678
- https://github.com/facebook/docusaurus/pull/5987
TODO we should probably add a real e2e test in core instead of using our own website?
Current solution looks good-enough for now
TODO we should probably add a real e2e test in core instead of using our own
website? Current solution looks good-enough for now
*/

View file

@@ -8,14 +8,16 @@
import {Translations} from '@crowdin/crowdin-api-client';
/*
Crowdin does not support concurrent "project builds" (downloads of translations).
The Crowdin CLI fails with error 409, and it leads to failures on Netlify.
Crowdin does not support concurrent "project builds" (downloads of
translations). The Crowdin CLI fails with error 409, and it leads to failures on
Netlify.
On Docusaurus, when we commit on main, we have 2 Netlify deployments triggered:
- prod
- i18n-staging (work-in-progress locales)
This script helps the 2 deployments to not download translations concurrently from Crowdin.
This script helps the 2 deployments to not download translations concurrently
from Crowdin.
*/
const pollInterval = 5000;