Merge branch 'main' into slorber/fix-docs-category-index-translation-key-conflict

This commit is contained in:
sebastien 2025-06-26 18:10:40 +02:00
commit 13828934b4
252 changed files with 10021 additions and 8162 deletions

View file

@ -1,4 +1,5 @@
---
id: file-name-1
slug: file-name-1
---

View file

@ -1,4 +1,5 @@
---
id: file-name-2
slug: file-name-2
---

View file

@ -0,0 +1,14 @@
---
sidebar_label: 'Dir with unique index label'
sidebar_class_name: 'dogfood_sidebar_class_name_test'
sidebar_custom_props:
prop: custom
number: 1
boolean: true
---
# Single index.md in dir
This doc has `sidebar_class_*` front matter
Dogfood test for bug https://github.com/facebook/docusaurus/issues/11258

View file

@ -4,4 +4,4 @@ sidebar_class_name: 'dogfood_sidebar_class_name_test'
# Doc With Sidebar Class Name
This doc has `sidebar_label` front matter
This doc has `sidebar_class_name` front matter

View file

@ -275,6 +275,14 @@ By default, it prints a warning, to let you know about your broken anchors.
### `onBrokenMarkdownLinks` {#onBrokenMarkdownLinks}
:::warning Deprecated
Deprecated in Docusaurus v3.9, and will be removed in Docusaurus v4.
Replaced by [`siteConfig.markdown.hooks.onBrokenMarkdownLinks`](#hooks.onBrokenMarkdownLinks)
:::
- Type: `'ignore' | 'log' | 'warn' | 'throw'`
The behavior of Docusaurus when it detects any broken Markdown link.
@ -511,14 +519,35 @@ type MarkdownAnchorsConfig = {
maintainCase: boolean;
};
type OnBrokenMarkdownLinksFunction = (params: {
sourceFilePath: string; // MD/MDX source file relative to cwd
url: string; // Link url
node: Link | Definition; // mdast Node
}) => void | string;
type OnBrokenMarkdownImagesFunction = (params: {
sourceFilePath: string; // MD/MDX source file relative to cwd
url: string; // Image url
node: Image; // mdast node
}) => void | string;
type ReportingSeverity = 'ignore' | 'log' | 'warn' | 'throw';
type MarkdownHooks = {
onBrokenMarkdownLinks: ReportingSeverity | OnBrokenMarkdownLinksFunction;
onBrokenMarkdownImages: ReportingSeverity | OnBrokenMarkdownImagesFunction;
};
type MarkdownConfig = {
format: 'mdx' | 'md' | 'detect';
mermaid: boolean;
emoji: boolean;
preprocessor?: MarkdownPreprocessor;
parseFrontMatter?: ParseFrontMatter;
mdx1Compat: MDX1CompatOptions;
remarkRehypeOptions: object; // see https://github.com/remarkjs/remark-rehype#options
anchors: MarkdownAnchorsConfig;
hooks: MarkdownHooks;
};
```
@ -529,6 +558,7 @@ export default {
markdown: {
format: 'mdx',
mermaid: true,
emoji: true,
preprocessor: ({filePath, fileContent}) => {
return fileContent.replaceAll('{{MY_VAR}}', 'MY_VALUE');
},
@ -546,6 +576,10 @@ export default {
anchors: {
maintainCase: true,
},
hooks: {
onBrokenMarkdownLinks: 'warn',
onBrokenMarkdownImages: 'throw',
},
},
};
```
@ -558,11 +592,15 @@ export default {
| --- | --- | --- | --- |
| `format` | `'mdx' \| 'md' \| 'detect'` | `'mdx'` | The default parser format to use for Markdown content. Using 'detect' will select the appropriate format automatically based on file extensions: `.md` vs `.mdx`. |
| `mermaid` | `boolean` | `false` | When `true`, allows Docusaurus to render Markdown code blocks with `mermaid` language as Mermaid diagrams. |
| `emoji` | `boolean` | `true` | When `true`, allows Docusaurus to render emoji shortcodes (e.g., `:+1:`) as Unicode emoji (👍). When `false`, emoji shortcodes are left as-is. |
| `preprocessor` | `MarkdownPreprocessor` | `undefined` | Gives you the ability to alter the Markdown content string before parsing. Use it as a last-resort escape hatch or workaround: it is almost always better to implement a Remark/Rehype plugin. |
| `parseFrontMatter` | `ParseFrontMatter` | `undefined` | Gives you the ability to provide your own front matter parser, or to enhance the default parser. Read our [front matter guide](../guides/markdown-features/markdown-features-intro.mdx#front-matter) for details. |
| `mdx1Compat` | `MDX1CompatOptions` | `{comments: true, admonitions: true, headingIds: true}` | Compatibility options to make it easier to upgrade to Docusaurus v3+. |
| `anchors` | `MarkdownAnchorsConfig` | `{maintainCase: false}` | Options to control the behavior of anchors generated from Markdown headings |
| `remarkRehypeOptions` | `object` | `undefined` | Makes it possible to pass custom [`remark-rehype` options](https://github.com/remarkjs/remark-rehype#options). |
| `hooks` | `MarkdownHooks` | `object` | Makes it possible to customize the MDX loader behavior with callbacks or built-in options. |
| `hooks.onBrokenMarkdownLinks` | `ReportingSeverity \| OnBrokenMarkdownLinksFunction` | `'warn'` | Hook to customize the behavior when encountering a broken Markdown link URL. With the callback function, you can return a new link URL, or alter the link [mdast node](https://github.com/syntax-tree/mdast). |
| `hooks.onBrokenMarkdownImages` | `ReportingSeverity \| OnBrokenMarkdownImagesFunction` | `'throw'` | Hook to customize the behavior when encountering a broken Markdown image URL. With the callback function, you can return a new image URL, or alter the image [mdast node](https://github.com/syntax-tree/mdast). |
```mdx-code-block
</APITable>

View file

@ -407,7 +407,7 @@ jobs:
- name: Install dependencies
run: npm ci
- name: Build website
run: npm build
run: npm run build
- name: Upload Build Artifact
uses: actions/upload-pages-artifact@v3
@ -461,7 +461,7 @@ jobs:
- name: Install dependencies
run: npm ci
- name: Test build website
run: npm build
run: npm run build
```
</TabItem>

View file

@ -15,7 +15,6 @@ export default {
url: 'https://docusaurus.io',
// We can only warn now, since we have blog pages linking to non-blog pages...
onBrokenLinks: 'warn',
onBrokenMarkdownLinks: 'warn',
favicon: 'img/docusaurus.ico',
themes: ['live-codeblock'],
plugins: ['ideal-image'],

View file

@ -217,6 +217,9 @@ export default async function createConfigAsync() {
markdown: {
format: 'detect',
mermaid: true,
hooks: {
onBrokenMarkdownLinks: 'warn',
},
mdx1Compat: {
// comments: false,
},
@ -265,7 +268,6 @@ export default async function createConfigAsync() {
process.env.DOCUSAURUS_CURRENT_LOCALE !== defaultLocale
? 'warn'
: 'throw',
onBrokenMarkdownLinks: 'warn',
favicon: 'img/docusaurus.ico',
customFields: {
crashTest,
@ -829,14 +831,14 @@ export default async function createConfigAsync() {
{
html: `
<a href="https://www.netlify.com" target="_blank" rel="noreferrer noopener" aria-label="Deploys by Netlify">
<img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" width="114" height="51" />
<img src="/img/footer/badge-netlify.svg" alt="Deploys by Netlify" width="114" height="51" />
</a>
`,
},
{
html: `
<a href="https://argos-ci.com" target="_blank" rel="noreferrer noopener" aria-label="Covered by Argos">
<img src="https://argos-ci.com/badge.svg" alt="Covered by Argos" width="133" height="20" />
<img src="/img/footer/badge-argos.svg" alt="Covered by Argos" width="133" height="20" />
</a>
`,
},

View file

@ -1,6 +1,6 @@
{
"name": "website",
"version": "3.8.0",
"version": "3.8.1",
"private": true,
"scripts": {
"docusaurus": "docusaurus",
@ -39,20 +39,20 @@
"dependencies": {
"@crowdin/cli": "^3.13.0",
"@crowdin/crowdin-api-client": "^1.29.5",
"@docusaurus/core": "3.8.0",
"@docusaurus/logger": "3.8.0",
"@docusaurus/plugin-client-redirects": "3.8.0",
"@docusaurus/plugin-ideal-image": "3.8.0",
"@docusaurus/plugin-pwa": "3.8.0",
"@docusaurus/plugin-rsdoctor": "3.8.0",
"@docusaurus/preset-classic": "3.8.0",
"@docusaurus/remark-plugin-npm2yarn": "3.8.0",
"@docusaurus/theme-classic": "3.8.0",
"@docusaurus/theme-common": "3.8.0",
"@docusaurus/theme-live-codeblock": "3.8.0",
"@docusaurus/theme-mermaid": "3.8.0",
"@docusaurus/utils": "3.8.0",
"@docusaurus/utils-common": "3.8.0",
"@docusaurus/core": "3.8.1",
"@docusaurus/logger": "3.8.1",
"@docusaurus/plugin-client-redirects": "3.8.1",
"@docusaurus/plugin-ideal-image": "3.8.1",
"@docusaurus/plugin-pwa": "3.8.1",
"@docusaurus/plugin-rsdoctor": "3.8.1",
"@docusaurus/preset-classic": "3.8.1",
"@docusaurus/remark-plugin-npm2yarn": "3.8.1",
"@docusaurus/theme-classic": "3.8.1",
"@docusaurus/theme-common": "3.8.1",
"@docusaurus/theme-live-codeblock": "3.8.1",
"@docusaurus/theme-mermaid": "3.8.1",
"@docusaurus/utils": "3.8.1",
"@docusaurus/utils-common": "3.8.1",
"clsx": "^2.0.0",
"color": "^4.2.3",
"fs-extra": "^11.1.1",
@ -83,8 +83,8 @@
]
},
"devDependencies": {
"@docusaurus/eslint-plugin": "3.8.0",
"@docusaurus/tsconfig": "3.8.0",
"@docusaurus/eslint-plugin": "3.8.1",
"@docusaurus/tsconfig": "3.8.1",
"@types/color": "^3.0.4",
"@types/jest": "^29.5.3",
"cross-env": "^7.0.3",

View file

@ -143,7 +143,7 @@ html[data-theme='dark'] {
);
}
div[class^='announcementBar_'] {
.theme-announcement-bar {
font-size: 20px;
/*

View file

@ -8,131 +8,42 @@
import path from 'path';
import fs from 'fs-extra';
import pluginContentBlog from '@docusaurus/plugin-content-blog';
import {aliasedSitePath, docuHash, normalizeUrl} from '@docusaurus/utils';
import {
aliasedSitePath,
docuHash,
normalizeUrl,
safeGlobby,
} from '@docusaurus/utils';
import {createBlogFiles, toChangelogEntries} from './utils';
export {validateOptions} from '@docusaurus/plugin-content-blog';
/**
* Multiple versions may be published on the same day, causing the order to be
* the reverse. Therefore, our publish time has a "fake hour" to order them.
*/
// TODO may leak small amount of memory in multi-locale builds
const publishTimes = new Set<string>();
const MonorepoRoot = path.resolve(path.join(__dirname, '../../../..'));
type Author = {name: string; url: string; alias: string; imageURL: string};
const ChangelogFilePattern = 'CHANGELOG(-v[0-9]*)?.md';
type AuthorsMap = Record<string, Author>;
type ChangelogEntry = {
title: string;
content: string;
authors: Author[];
};
function parseAuthor(committerLine: string): Author {
const groups = committerLine.match(
/- (?:(?<name>.*?) \()?\[@(?<alias>.*)\]\((?<url>.*?)\)\)?/,
)!.groups as {name: string; alias: string; url: string};
return {
...groups,
name: groups.name ?? groups.alias,
imageURL: `https://github.com/${groups.alias}.png`,
};
}
function parseAuthors(content: string): Author[] {
const committersContent = content.match(/## Committers: \d.*/s)?.[0];
if (!committersContent) {
return [];
}
const committersLines = committersContent.match(/- .*/g)!;
const authors = committersLines
.map(parseAuthor)
.sort((a, b) => a.url.localeCompare(b.url));
return authors;
}
function createAuthorsMap(changelogEntries: ChangelogEntry[]): AuthorsMap {
const allAuthors = changelogEntries.flatMap((entry) => entry.authors);
const authorsMap: AuthorsMap = {};
allAuthors?.forEach((author) => {
authorsMap[author.alias] = author;
async function getChangelogFiles() {
const files = await safeGlobby([ChangelogFilePattern], {
cwd: MonorepoRoot,
});
return authorsMap;
}
function toChangelogEntry(sectionContent: string): ChangelogEntry | null {
const title = sectionContent
.match(/\n## .*/)?.[0]
.trim()
.replace('## ', '');
if (!title) {
return null;
// As of today, there are 2 changelog files
// and this is only going to increase
if (files.length < 2) {
throw new Error(
"Looks like the changelog plugin didn't detect Docusaurus changelog files",
);
}
const content = sectionContent
.replace(/\n## .*/, '')
.trim()
.replace('running_woman', 'running');
const authors = parseAuthors(content);
let hour = 20;
const date = title.match(/ \((?<date>.*)\)/)?.groups!.date;
while (publishTimes.has(`${date}T${hour}:00`)) {
hour -= 1;
}
publishTimes.add(`${date}T${hour}:00`);
return {
authors,
title: title.replace(/ \(.*\)/, ''),
content: `---
mdx:
format: md
date: ${`${date}T${hour}:00`}${
authors.length > 0
? `
authors:
${authors.map((author) => ` - '${author.alias}'`).join('\n')}`
: ''
}
---
# ${title.replace(/ \(.*\)/, '')}
<!-- truncate -->
${content.replace(/####/g, '##')}`,
};
// Note: the returned file order doesn't matter.
return files;
}
function toChangelogEntries(fileContent: string): ChangelogEntry[] {
return fileContent
.split(/(?=\n## )/)
.map(toChangelogEntry)
.filter((s): s is ChangelogEntry => s !== null);
// Reads one changelog file (path relative to the monorepo root) as UTF-8 text.
function readChangelogFile(filename: string) {
  return fs.readFile(path.join(MonorepoRoot, filename), 'utf-8');
}
async function createBlogFiles(
generateDir: string,
changelogEntries: ChangelogEntry[],
) {
await Promise.all(
changelogEntries.map((changelogEntry) =>
fs.outputFile(
path.join(generateDir, `${changelogEntry.title}.md`),
changelogEntry.content,
),
),
);
await fs.outputFile(
path.join(generateDir, 'authors.json'),
JSON.stringify(createAuthorsMap(changelogEntries), null, 2),
);
// Reads every detected changelog file concurrently and parses the combined
// contents into individual changelog entries.
async function loadChangelogEntries(changelogFiles: string[]) {
  const filesContent = await Promise.all(changelogFiles.map(readChangelogFile));
  return toChangelogEntries(filesContent);
}
const ChangelogPlugin: typeof pluginContentBlog =
@ -145,14 +56,14 @@ const ChangelogPlugin: typeof pluginContentBlog =
blogListComponent: '@theme/ChangelogList',
blogPostComponent: '@theme/ChangelogPage',
});
const changelogPath = path.join(__dirname, '../../../../CHANGELOG.md');
const changelogFiles = await getChangelogFiles();
return {
...blogPlugin,
name: 'changelog-plugin',
async loadContent() {
const fileContent = await fs.readFile(changelogPath, 'utf-8');
const changelogEntries = toChangelogEntries(fileContent);
const changelogEntries = await loadChangelogEntries(changelogFiles);
// We have to create intermediate files here
// Unfortunately Docusaurus doesn't have yet any concept of virtual file
@ -199,8 +110,7 @@ const ChangelogPlugin: typeof pluginContentBlog =
},
getPathsToWatch() {
// Don't watch the generated dir
return [changelogPath];
return [path.join(MonorepoRoot, ChangelogFilePattern)];
},
};
};

View file

@ -0,0 +1,134 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import fs from 'fs-extra';
import path from 'path';
/**
* Multiple versions may be published on the same day, causing the order to be
* the reverse. Therefore, our publish time has a "fake hour" to order them.
*/
// TODO may leak small amount of memory in multi-locale builds
// Registry of already-assigned "fake" publish timestamps, so that several
// entries released on the same date receive distinct, ordered times.
const publishTimes = new Set<string>();

// A changelog committer, as parsed from a "## Committers" section line.
type Author = {name: string; url: string; alias: string; imageURL: string};

// Maps a GitHub alias to its Author record.
type AuthorsMap = Record<string, Author>;

// One "## <version> (<date>)" section of a CHANGELOG file, converted into a
// blog-post-like unit: `content` holds front matter plus the markdown body.
type ChangelogEntry = {
  title: string;
  content: string;
  authors: Author[];
};
/**
 * Parses a single committer line of the form `- Name ([@alias](url))` or
 * `- [@alias](url)` into an Author. When no display name is present, the
 * GitHub alias is used as the name.
 *
 * @throws Error with the offending line when it does not match the expected
 * committer format (previously this crashed with an opaque TypeError from a
 * non-null assertion).
 */
function parseAuthor(committerLine: string): Author {
  const match = committerLine.match(
    /- (?:(?<name>.*?) \()?\[@(?<alias>.*)\]\((?<url>.*?)\)\)?/,
  );
  if (!match?.groups) {
    throw new Error(`Unexpected committer line format: ${committerLine}`);
  }
  const groups = match.groups as {name?: string; alias: string; url: string};
  return {
    ...groups,
    name: groups.name ?? groups.alias,
    imageURL: `https://github.com/${groups.alias}.png`,
  };
}
/**
 * Extracts all committers from a changelog section body.
 * Looks for the "## Committers: N" block; returns an empty list when the
 * section has none. Authors are sorted by profile URL for stable output.
 */
function parseAuthors(content: string): Author[] {
  const committersSection = content.match(/## Committers: \d.*/s)?.[0];
  if (!committersSection) {
    return [];
  }
  const committerLines = committersSection.match(/- .*/g)!;
  return committerLines
    .map((line) => parseAuthor(line))
    .sort((first, second) => first.url.localeCompare(second.url));
}
/**
 * Builds a map from GitHub alias to Author across all changelog entries.
 * When the same alias appears in several entries, the last occurrence wins
 * (same behavior as the previous forEach-based accumulation).
 */
export function createAuthorsMap(
  changelogEntries: ChangelogEntry[],
): AuthorsMap {
  // flatMap always returns an array, so the previous `allAuthors?.forEach`
  // optional chaining was dead weight.
  const allAuthors = changelogEntries.flatMap((entry) => entry.authors);
  return Object.fromEntries(
    allAuthors.map((author) => [author.alias, author]),
  );
}
/**
 * Converts one "## <version> (<date>)" changelog section into a blog entry.
 * Returns null for chunks that precede the first section heading.
 *
 * Several versions may be published on the same day; to keep them ordered,
 * each entry on a given date receives a decreasing "fake hour" (20:00,
 * 19:00, …) recorded in the module-level `publishTimes` set.
 */
function toChangelogEntry(sectionContent: string): ChangelogEntry | null {
  const title = sectionContent
    .match(/\n## .*/)?.[0]
    .trim()
    .replace('## ', '');
  if (!title) {
    return null;
  }
  const content = sectionContent
    .replace(/\n## .*/, '')
    .trim()
    // NOTE(review): presumably works around a rendering issue with the
    // :running_woman: emoji shortcode — confirm.
    .replace('running_woman', 'running');
  const authors = parseAuthors(content);
  let hour = 20;
  // assumes the title always ends with " (<date>)" — TODO confirm; a missing
  // date would leave `date` undefined here and produce "undefinedT20:00".
  const date = title.match(/ \((?<date>.*)\)/)?.groups!.date;
  while (publishTimes.has(`${date}T${hour}:00`)) {
    hour -= 1;
  }
  publishTimes.add(`${date}T${hour}:00`);
  return {
    authors,
    title: title.replace(/ \(.*\)/, ''),
    // Front matter + markdown body, emitted later as a standalone .md post.
    content: `---
mdx:
  format: md
date: ${`${date}T${hour}:00`}${
      authors.length > 0
        ? `
authors:
${authors.map((author) => `  - '${author.alias}'`).join('\n')}`
        : ''
    }
---

# ${title.replace(/ \(.*\)/, '')}

<!-- truncate -->

${content.replace(/####/g, '##')}`,
  };
}
/**
 * Splits the raw text of one or more CHANGELOG files into individual
 * entries — one per "## <version>" section — discarding preamble chunks
 * that do not form a valid entry.
 */
export function toChangelogEntries(filesContent: string[]): ChangelogEntry[] {
  const sections = filesContent.flatMap((fileText) =>
    fileText.split(/(?=\n## )/),
  );
  const entries: ChangelogEntry[] = [];
  for (const section of sections) {
    const entry = toChangelogEntry(section);
    if (entry !== null) {
      entries.push(entry);
    }
  }
  return entries;
}
/**
 * Materializes changelog entries as Markdown files under `generateDir`,
 * plus an `authors.json` aggregating all committers. Entry files are
 * written concurrently; authors.json is written afterwards.
 */
export async function createBlogFiles(
  generateDir: string,
  changelogEntries: ChangelogEntry[],
): Promise<void> {
  const writeEntry = (entry: ChangelogEntry) => {
    const target = path.join(generateDir, `${entry.title}.md`);
    return fs.outputFile(target, entry.content);
  };
  await Promise.all(changelogEntries.map(writeEntry));

  const authorsJson = JSON.stringify(
    createAuthorsMap(changelogEntries),
    null,
    2,
  );
  await fs.outputFile(path.join(generateDir, 'authors.json'), authorsJson);
}

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 12 KiB

View file

@ -0,0 +1 @@
<svg width="114" height="50" viewBox="0 0 114 50" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#a)"><path fill="#0C2A2A" d="M0 0h114v51H0z"/><g clip-path="url(#b)"><path d="M44.445 40.302v-7.695l.153-.16h1.853l.154.16v7.695l-.154.16h-1.853l-.153-.16ZM44.445 17.855v-7.696l.153-.159h1.853l.154.16v7.695l-.154.16h-1.853l-.153-.16ZM38.068 34.892h-.254l-1.275-1.323v-.264l2.388-2.482 1.349.002.18.185v1.4l-2.388 2.482ZM36.54 17.227v-.267l1.274-1.32h.254l2.388 2.479v1.398l-.18.19h-1.349l-2.388-2.48ZM28.154 24.107h10.5l.153.16v1.926l-.153.16h-10.5l-.154-.16v-1.927l.154-.16ZM99.844 24.11l.154.159v1.924l-.154.16H89.19l-.153-.16.77-1.924.154-.16h9.883Z" fill="#32E6E2"/><path d="M49 29.24h-1.853l-.153-.16v-4.502c0-.802-.303-1.423-1.234-1.443a24.2 24.2 0 0 0-1.613.025l-.089.092v5.826l-.153.16h-1.853l-.154-.16v-7.695l.154-.16h4.169c1.62 0 2.933 1.364 2.933 3.046v4.81l-.154.159v.002ZM57.974 25.871l-.154.16h-4.79l-.154.16c0 .32.31 1.283 1.546 1.283.463 0 .926-.16 1.082-.481l.154-.16h1.852l.154.16c-.154.962-.926 2.407-3.245 2.407-2.625 0-3.861-1.924-3.861-4.172 0-2.248 1.236-4.171 3.708-4.171 2.472 0 3.708 1.924 3.708 4.171v.643Zm-2.319-1.605c0-.16-.153-1.283-1.39-1.283-1.235 0-1.389 1.124-1.389 1.283l.154.16h2.472l.153-.16ZM62.298 26.833c0 .322.154.481.464.481h1.39l.153.16v1.604l-.154.16h-1.39c-1.389 0-2.625-.643-2.625-2.407v-3.529l-.154-.16H58.9l-.154-.159v-1.605l.154-.16h1.082l.154-.159v-1.443l.154-.16h1.852l.154.16v1.443l.154.16h1.699l.153.16v1.604l-.153.16h-1.7l-.153.159v3.529l.002.002ZM68.015 29.24h-1.853l-.153-.16V18.172l.153-.16h1.853l.154.16v10.907l-.154.16v.002ZM72.186 19.935h-1.852l-.154-.16v-1.604l.154-.16h1.852l.154.16v1.605l-.154.16Zm0 9.305h-1.852l-.154-.16v-7.7l.154-.159h1.852l.154.16v7.7l-.154.16ZM79.449 18.171v1.605l-.154.16h-1.39c-.309 0-.463.159-.463.48v.643l.154.16h1.546l.153.16v1.604l-.153.16h-1.546l-.154.159v5.774l-.153.16h-1.853l-.154-.16v-5.774l-.153-.16h-1.083l-.153-.159v-1.605l.153-.16h1.083l.153-.159v-.643c0-1.764 1.236-2.407 
2.626-2.407h1.39l.153.16-.002.002ZM85.163 29.4c-.619 1.605-1.236 2.567-3.398 2.567h-.773l-.154-.16v-1.605l.154-.16h.773c.773 0 .926-.159 1.082-.64v-.16l-2.472-6.257v-1.604l.154-.16h1.39l.153.16 1.853 5.455h.153l1.853-5.455.154-.16h1.39l.153.16v1.604l-2.472 6.417.007-.002Z" fill="#fff"/></g><path fill="#32E6E2" d="M0 0v51h15V0z"/><path d="M5.518 45.367v-.774H10v.774H5.518Zm0-1.746c0-.44.098-.784.294-1.032.196-.248.474-.372.834-.372.348 0 .622.13.822.39.2.26.3.628.3 1.104l-.336.018c.004-.608.114-1.062.33-1.362.212-.3.524-.45.936-.45.424 0 .748.146.972.438.22.288.33.686.33 1.194v1.242h-.738v-1.152c0-.268-.05-.488-.15-.66a.498.498 0 0 0-.462-.258c-.224 0-.39.088-.498.264-.108.172-.162.39-.162.654v1.146h-.738V43.68c0-.192-.04-.36-.12-.504a.426.426 0 0 0-.396-.216c-.192 0-.33.068-.414.204a.955.955 0 0 0-.126.51v1.116h-.678v-1.17Zm0-2.925v-.774H8.17c.256 0 .472-.04.648-.12a.88.88 0 0 0 .39-.336.926.926 0 0 0 .126-.486.926.926 0 0 0-.126-.486.845.845 0 0 0-.39-.33 1.497 1.497 0 0 0-.648-.126H5.518v-.75h2.664c.604 0 1.068.146 1.392.438.324.288.486.71.486 1.266 0 .56-.162.984-.486 1.272-.324.288-.788.432-1.392.432H5.518Zm0-5.048v-.774H10v.774H5.518Zm0-2.475V32.4H10v.774H5.518Zm3.75-.546v-2.412H10v2.412h-.732Zm-3.69-3.903v-.773H10v.773H5.578Zm-.06 1.41v-3.6h.732v3.6h-.732Zm0-6.98v-.827l3.684-1.056v.318L5.518 20.43v-.888l3.684-1.122v.318L5.518 17.64v-.798L10 18.18v.81l-4.086 1.254v-.438L10 21.097v.756l-4.482 1.302Zm0-7.385v-.774H10v.774H5.518Zm.06-3.283v-.774H10v.774H5.578Zm-.06 1.41v-3.6h.732v3.6h-.732Zm0-4.7v-.773H10v.774H5.518Zm1.572-.545V6.179h.732V8.65H7.09ZM5.518 6.407v-.774H10v.774H5.518Z" fill="#0C2A2A"/></g><rect x="1" y="1" width="112" height="48" rx="3" stroke="#32E6E2" stroke-width="2"/><defs><clipPath id="a"><rect width="114" height="50" rx="4" fill="#fff"/></clipPath><clipPath id="b"><path fill="#fff" d="M28 10h72v30.462H28z"/></clipPath></defs></svg>

After

Width:  |  Height:  |  Size: 3.8 KiB

View file

Before

Width:  |  Height:  |  Size: 92 KiB

After

Width:  |  Height:  |  Size: 92 KiB

Before After
Before After

Some files were not shown because too many files have changed in this diff Show more