diff --git a/.github/workflows/lint-autofix.yml b/.github/workflows/lint-autofix.yml new file mode 100644 index 0000000000..ed857a3e91 --- /dev/null +++ b/.github/workflows/lint-autofix.yml @@ -0,0 +1,44 @@ +name: Lint AutoFix + +on: + pull_request: + branches: + - main + - docusaurus-v** + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + lint-autofix: + name: Lint AutoFix + runs-on: ubuntu-latest + + permissions: + contents: write + + steps: + - uses: actions/checkout@v4 + with: + repository: ${{ github.event.pull_request.head.repo.full_name }} + ref: ${{ github.head_ref }} + + - name: Installation + run: yarn + + - name: AutoFix Format + run: yarn format + + - name: AutoFix JS + run: yarn lint:js:fix + + - name: AutoFix Style + run: yarn lint:style:fix + + - name: AutoFix Spelling + run: yarn lint:spelling:fix + + - uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: 'refactor: apply lint autofix' diff --git a/.lintstagedrc.json b/.lintstagedrc.json index b25a96cf48..f526fd4b5b 100644 --- a/.lintstagedrc.json +++ b/.lintstagedrc.json @@ -1,8 +1,5 @@ { "*.{js,jsx,ts,tsx,mjs}": ["eslint --fix"], "*.css": ["stylelint --allow-empty-input --fix"], - "*": [ - "prettier --ignore-unknown --write", - "cspell --no-must-find-files --no-progress" - ] + "*": ["prettier --ignore-unknown --write"] } diff --git a/.prettierignore b/.prettierignore index 6d600d2aad..aa68052ce1 100644 --- a/.prettierignore +++ b/.prettierignore @@ -24,5 +24,5 @@ website/versioned_sidebars/*.json examples/ website/static/katex/katex.min.css -website/changelog/_swizzle_theme_tests +website/changelog website/_dogfooding/_swizzle_theme_tests diff --git a/admin/new.docusaurus.io/package.json b/admin/new.docusaurus.io/package.json index 7bcba51a39..9ff47cd054 100644 --- a/admin/new.docusaurus.io/package.json +++ b/admin/new.docusaurus.io/package.json @@ -1,6 +1,6 @@ { "name": "new.docusaurus.io", - "version": "3.0.1", + "version": "3.1.0", "private": true, "scripts": { "start": "npx --package netlify-cli netlify dev" diff --git a/argos/package.json b/argos/package.json index 286bcd960d..437f323dcd 100644 --- a/argos/package.json +++ b/argos/package.json @@ -1,6 +1,6 @@ { "name": "argos", - "version": "3.0.1", + "version": "3.1.0", "description": "Argos visual diff tests", "license": "MIT", "private": true, diff --git a/lerna.json b/lerna.json index 86e15ecaf8..54946d7974 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "3.0.1", + "version": "3.1.0", "npmClient": "yarn", "useWorkspaces": true, "useNx": false, diff --git a/package.json b/package.json index 66548a0dd3..580d85d1ea 100644 --- a/package.json +++ b/package.json @@ -51,8 +51,11 @@ "lint": "yarn lint:js && yarn lint:style && yarn lint:spelling", "lint:ci": "yarn lint:js --quiet && yarn lint:style && yarn lint:spelling", "lint:js": "eslint --cache --report-unused-disable-directives \"**/*.{js,jsx,ts,tsx,mjs}\"", - "lint:spelling": "cspell \"**\" --no-progress", + "lint:js:fix": "yarn lint:js --fix", + "lint:spelling": "cspell \"**\" --no-progress --show-context --show-suggestions", + "lint:spelling:fix": "yarn rimraf project-words.txt && echo \"# Project Words - DO NOT TOUCH - This is updated through CI\" >> project-words.txt && yarn -s lint:spelling --words-only --unique --no-exit-code --no-summary \"**\" | sort --ignore-case >> project-words.txt", "lint:style": "stylelint \"**/*.css\"", + "lint:style:fix": "yarn lint:style --fix", "lerna": "lerna", 
"test": "jest", "test:build:website": "./admin/scripts/test-release.sh", @@ -80,7 +83,7 @@ "@typescript-eslint/eslint-plugin": "^5.62.0", "@typescript-eslint/parser": "^5.62.0", "cross-env": "^7.0.3", - "cspell": "^6.31.2", + "cspell": "^8.1.0", "eslint": "^8.45.0", "eslint-config-airbnb": "^19.0.4", "eslint-config-prettier": "^8.8.0", diff --git a/packages/create-docusaurus/README.md b/packages/create-docusaurus/README.md index f045003ad5..2533546271 100644 --- a/packages/create-docusaurus/README.md +++ b/packages/create-docusaurus/README.md @@ -10,6 +10,10 @@ npm init docusaurus yarn create docusaurus ``` +```bash +npx create-docusaurus@latest +``` + ## Usage Please see the [installation documentation](https://docusaurus.io/docs/installation). diff --git a/packages/create-docusaurus/package.json b/packages/create-docusaurus/package.json index b2360d6537..9722cf2c6a 100755 --- a/packages/create-docusaurus/package.json +++ b/packages/create-docusaurus/package.json @@ -1,6 +1,6 @@ { "name": "create-docusaurus", - "version": "3.0.1", + "version": "3.1.0", "description": "Create Docusaurus apps easily.", "type": "module", "repository": { @@ -22,8 +22,8 @@ }, "license": "MIT", "dependencies": { - "@docusaurus/logger": "3.0.1", - "@docusaurus/utils": "3.0.1", + "@docusaurus/logger": "3.1.0", + "@docusaurus/utils": "3.1.0", "commander": "^5.1.0", "fs-extra": "^11.1.1", "lodash": "^4.17.21", diff --git a/packages/create-docusaurus/templates/classic-typescript/package.json b/packages/create-docusaurus/templates/classic-typescript/package.json index c4d7834d5a..b9f64f45d1 100644 --- a/packages/create-docusaurus/templates/classic-typescript/package.json +++ b/packages/create-docusaurus/templates/classic-typescript/package.json @@ -1,6 +1,6 @@ { "name": "docusaurus-2-classic-typescript-template", - "version": "3.0.1", + "version": "3.1.0", "private": true, "scripts": { "docusaurus": "docusaurus", @@ -15,8 +15,8 @@ "typecheck": "tsc" }, "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/preset-classic": "3.0.1", + "@docusaurus/core": "3.1.0", + "@docusaurus/preset-classic": "3.1.0", "@mdx-js/react": "^3.0.0", "clsx": "^2.0.0", "prism-react-renderer": "^2.3.0", @@ -24,9 +24,9 @@ "react-dom": "^18.0.0" }, "devDependencies": { - "@docusaurus/module-type-aliases": "3.0.1", - "@docusaurus/tsconfig": "3.0.1", - "@docusaurus/types": "3.0.1", + "@docusaurus/module-type-aliases": "3.1.0", + "@docusaurus/tsconfig": "3.1.0", + "@docusaurus/types": "3.1.0", "typescript": "~5.2.2" }, "browserslist": { diff --git a/packages/create-docusaurus/templates/classic/package.json b/packages/create-docusaurus/templates/classic/package.json index d13c4f1542..cd100edb37 100644 --- a/packages/create-docusaurus/templates/classic/package.json +++ b/packages/create-docusaurus/templates/classic/package.json @@ -1,6 +1,6 @@ { "name": "docusaurus-2-classic-template", - "version": "3.0.1", + "version": "3.1.0", "private": true, "scripts": { "docusaurus": "docusaurus", @@ -14,8 +14,8 @@ "write-heading-ids": "docusaurus write-heading-ids" }, "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/preset-classic": "3.0.1", + "@docusaurus/core": "3.1.0", + "@docusaurus/preset-classic": "3.1.0", "@mdx-js/react": "^3.0.0", "clsx": "^2.0.0", "prism-react-renderer": "^2.3.0", @@ -23,8 +23,8 @@ "react-dom": "^18.0.0" }, "devDependencies": { - "@docusaurus/module-type-aliases": "3.0.1", - "@docusaurus/types": "3.0.1" + "@docusaurus/module-type-aliases": "3.1.0", + "@docusaurus/types": "3.1.0" }, "browserslist": { 
"production": [ diff --git a/packages/create-docusaurus/templates/shared/docs/tutorial-basics/markdown-features.mdx b/packages/create-docusaurus/templates/shared/docs/tutorial-basics/markdown-features.mdx index 0337f34d6a..35e00825ed 100644 --- a/packages/create-docusaurus/templates/shared/docs/tutorial-basics/markdown-features.mdx +++ b/packages/create-docusaurus/templates/shared/docs/tutorial-basics/markdown-features.mdx @@ -61,13 +61,13 @@ You can reference images relative to the current file as well. This is particula Markdown code blocks are supported with Syntax highlighting. - ```jsx title="src/components/HelloDocusaurus.js" - function HelloDocusaurus() { - return ( -
;
+}
diff --git a/packages/docusaurus-theme-classic/src/theme/Heading/index.tsx b/packages/docusaurus-theme-classic/src/theme/Heading/index.tsx
index 9ca1bc3f0d..b17cd12ec3 100644
--- a/packages/docusaurus-theme-classic/src/theme/Heading/index.tsx
+++ b/packages/docusaurus-theme-classic/src/theme/Heading/index.tsx
@@ -10,11 +10,13 @@ import clsx from 'clsx';
import {translate} from '@docusaurus/Translate';
import {useThemeConfig} from '@docusaurus/theme-common';
import Link from '@docusaurus/Link';
+import useBrokenLinks from '@docusaurus/useBrokenLinks';
import type {Props} from '@theme/Heading';
import styles from './styles.module.css';
export default function Heading({as: As, id, ...props}: Props): JSX.Element {
+ const brokenLinks = useBrokenLinks();
const {
navbar: {hideOnScroll},
} = useThemeConfig();
@@ -23,6 +25,8 @@ export default function Heading({as: As, id, ...props}: Props): JSX.Element {
   if (As === 'h1' || !id) {
     return <As {...props} id={undefined} />;
   }
 
+  brokenLinks.collectAnchor(id);
+
   return (
+export default function MDXCode(props: Props): JSX.Element {
+ return shouldBeInline(props) ? (
+ ["'])(?.*?)\1/; const metastringLinesRangeRegex = /\{(? [\d,-]+)\}/; // Supported types of highlight comments -const commentPatterns = { +const popularCommentPatterns = { js: {start: '\\/\\/', end: ''}, jsBlock: {start: '\\/\\*', end: '\\*\\/'}, jsx: {start: '\\{\\s*\\/\\*', end: '\\*\\/\\s*\\}'}, bash: {start: '#', end: ''}, html: {start: ''}, +} as const; + +const commentPatterns = { + ...popularCommentPatterns, // shallow copy is sufficient + // minor comment styles lua: {start: '--', end: ''}, wasm: {start: '\\;\\;', end: ''}, tex: {start: '%', end: ''}, -}; + vb: {start: "['‘’]", end: ''}, + rem: {start: '[Rr][Ee][Mm]\\b', end: ''}, + f90: {start: '!', end: ''}, // Free format only + ml: {start: '\\(\\*', end: '\\*\\)'}, + cobol: {start: '\\*>', end: ''}, // Free format only +} as const; type CommentType = keyof typeof commentPatterns; +const popularCommentTypes = Object.keys( + popularCommentPatterns, +) as CommentType[]; export type MagicCommentConfig = { className: string; @@ -99,15 +112,34 @@ function getAllMagicCommentDirectiveStyles( case 'wasm': return getCommentPattern(['wasm'], magicCommentDirectives); + case 'vb': + case 'vbnet': + case 'vba': + case 'visual-basic': + return getCommentPattern(['vb', 'rem'], magicCommentDirectives); + + case 'batch': + return getCommentPattern(['rem'], magicCommentDirectives); + + case 'basic': // https://github.com/PrismJS/prism/blob/master/components/prism-basic.js#L3 + return getCommentPattern(['rem', 'f90'], magicCommentDirectives); + + case 'fsharp': + return getCommentPattern(['js', 'ml'], magicCommentDirectives); + + case 'ocaml': + case 'sml': + return getCommentPattern(['ml'], magicCommentDirectives); + + case 'fortran': + return getCommentPattern(['f90'], magicCommentDirectives); + + case 'cobol': + return getCommentPattern(['cobol'], magicCommentDirectives); + default: - // All comment types except lua, wasm and matlab - return getCommentPattern( - Object.keys(commentPatterns).filter( - (pattern) => - !['lua', 'wasm', 'tex', 'latex', 'matlab'].includes(pattern), - ) as CommentType[], - magicCommentDirectives, - ); + // All popular comment types + return getCommentPattern(popularCommentTypes, magicCommentDirectives); } } diff --git a/packages/docusaurus-theme-live-codeblock/package.json b/packages/docusaurus-theme-live-codeblock/package.json index 07c0736bc3..98f02045ef 100644 --- a/packages/docusaurus-theme-live-codeblock/package.json +++ b/packages/docusaurus-theme-live-codeblock/package.json @@ -1,6 +1,6 @@ { "name": "@docusaurus/theme-live-codeblock", - "version": "3.0.1", + "version": "3.1.0", "description": "Docusaurus live code block component.", "main": "lib/index.js", "types": "src/theme-live-codeblock.d.ts", @@ -23,10 +23,10 @@ }, "license": "MIT", "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/theme-common": "3.0.1", - "@docusaurus/theme-translations": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/core": "3.1.0", + "@docusaurus/theme-common": "3.1.0", + "@docusaurus/theme-translations": "3.1.0", + "@docusaurus/utils-validation": "3.1.0", "@philpl/buble": "^0.19.7", "clsx": "^2.0.0", "fs-extra": "^11.1.1", @@ -34,7 +34,7 @@ "tslib": "^2.6.0" }, "devDependencies": { - "@docusaurus/types": "3.0.1", + "@docusaurus/types": "3.1.0", "@types/buble": "^0.20.1" }, "peerDependencies": { diff --git a/packages/docusaurus-theme-live-codeblock/src/theme-live-codeblock.d.ts b/packages/docusaurus-theme-live-codeblock/src/theme-live-codeblock.d.ts index b3670f476d..12bd27e8c7 100644 --- 
a/packages/docusaurus-theme-live-codeblock/src/theme-live-codeblock.d.ts +++ b/packages/docusaurus-theme-live-codeblock/src/theme-live-codeblock.d.ts @@ -24,7 +24,8 @@ declare module '@theme/Playground' { type LiveProviderProps = React.ComponentProps ; export interface Props extends CodeBlockProps, LiveProviderProps { - children: string; + // Allow empty live playgrounds + children?: string; } export default function Playground(props: LiveProviderProps): JSX.Element; } diff --git a/packages/docusaurus-theme-live-codeblock/src/theme/Playground/index.tsx b/packages/docusaurus-theme-live-codeblock/src/theme/Playground/index.tsx index ae0b2c4023..1f36e69f57 100644 --- a/packages/docusaurus-theme-live-codeblock/src/theme/Playground/index.tsx +++ b/packages/docusaurus-theme-live-codeblock/src/theme/Playground/index.tsx @@ -98,6 +98,10 @@ function EditorWithHeader() { ); } +// this should rather be a stable function +// see https://github.com/facebook/docusaurus/issues/9630#issuecomment-1855682643 +const DEFAULT_TRANSFORM_CODE = (code: string) => `${code};`; + export default function Playground({ children, transformCode, @@ -116,9 +120,9 @@ export default function Playground({ return ( `${code};`)} + transformCode={transformCode ?? DEFAULT_TRANSFORM_CODE} theme={prismTheme} {...props}> {playgroundPosition === 'top' ? ( diff --git a/packages/docusaurus-theme-mermaid/package.json b/packages/docusaurus-theme-mermaid/package.json index fe3aa5ddd0..d817de8ec4 100644 --- a/packages/docusaurus-theme-mermaid/package.json +++ b/packages/docusaurus-theme-mermaid/package.json @@ -1,6 +1,6 @@ { "name": "@docusaurus/theme-mermaid", - "version": "3.0.1", + "version": "3.1.0", "description": "Mermaid components for Docusaurus.", "main": "lib/index.js", "types": "src/theme-mermaid.d.ts", @@ -33,11 +33,11 @@ "copy:watch": "node ../../admin/scripts/copyUntypedFiles.js --watch" }, "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/module-type-aliases": "3.0.1", - "@docusaurus/theme-common": "3.0.1", - "@docusaurus/types": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/core": "3.1.0", + "@docusaurus/module-type-aliases": "3.1.0", + "@docusaurus/theme-common": "3.1.0", + "@docusaurus/types": "3.1.0", + "@docusaurus/utils-validation": "3.1.0", "mermaid": "^10.4.0", "tslib": "^2.6.0" }, diff --git a/packages/docusaurus-theme-search-algolia/package.json b/packages/docusaurus-theme-search-algolia/package.json index 59773fc351..94e8314389 100644 --- a/packages/docusaurus-theme-search-algolia/package.json +++ b/packages/docusaurus-theme-search-algolia/package.json @@ -1,6 +1,6 @@ { "name": "@docusaurus/theme-search-algolia", - "version": "3.0.1", + "version": "3.1.0", "description": "Algolia search component for Docusaurus.", "main": "lib/index.js", "sideEffects": [ @@ -34,13 +34,13 @@ }, "dependencies": { "@docsearch/react": "^3.5.2", - "@docusaurus/core": "3.0.1", - "@docusaurus/logger": "3.0.1", - "@docusaurus/plugin-content-docs": "3.0.1", - "@docusaurus/theme-common": "3.0.1", - "@docusaurus/theme-translations": "3.0.1", - "@docusaurus/utils": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/core": "3.1.0", + "@docusaurus/logger": "3.1.0", + "@docusaurus/plugin-content-docs": "3.1.0", + "@docusaurus/theme-common": "3.1.0", + "@docusaurus/theme-translations": "3.1.0", + "@docusaurus/utils": "3.1.0", + "@docusaurus/utils-validation": "3.1.0", "algoliasearch": "^4.18.0", "algoliasearch-helper": "^3.13.3", "clsx": "^2.0.0", @@ -51,7 +51,7 @@ "utility-types": "^3.10.0" }, 
"devDependencies": { - "@docusaurus/module-type-aliases": "3.0.1" + "@docusaurus/module-type-aliases": "3.1.0" }, "peerDependencies": { "react": "^18.0.0", diff --git a/packages/docusaurus-theme-search-algolia/src/theme/SearchBar/index.tsx b/packages/docusaurus-theme-search-algolia/src/theme/SearchBar/index.tsx index 0f4d7ee000..9a3a4ae3bc 100644 --- a/packages/docusaurus-theme-search-algolia/src/theme/SearchBar/index.tsx +++ b/packages/docusaurus-theme-search-algolia/src/theme/SearchBar/index.tsx @@ -6,6 +6,7 @@ */ import React, {useCallback, useMemo, useRef, useState} from 'react'; +import {createPortal} from 'react-dom'; import {DocSearchButton, useDocSearchKeyboardEvents} from '@docsearch/react'; import Head from '@docusaurus/Head'; import Link from '@docusaurus/Link'; @@ -20,7 +21,6 @@ import { } from '@docusaurus/theme-search-algolia/client'; import Translate from '@docusaurus/Translate'; import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; -import {createPortal} from 'react-dom'; import translations from '@theme/SearchTranslations'; import type {AutocompleteState} from '@algolia/autocomplete-core'; diff --git a/packages/docusaurus-theme-translations/locales/pt-BR/theme-common.json b/packages/docusaurus-theme-translations/locales/pt-BR/theme-common.json index edf86395d9..3c930a6ee1 100644 --- a/packages/docusaurus-theme-translations/locales/pt-BR/theme-common.json +++ b/packages/docusaurus-theme-translations/locales/pt-BR/theme-common.json @@ -4,22 +4,22 @@ "theme.CodeBlock.copied": "Copiado", "theme.CodeBlock.copy": "Copiar", "theme.CodeBlock.copyButtonAriaLabel": "Copiar código para a área de transferência", - "theme.CodeBlock.wordWrapToggle": "Toggle word wrap", - "theme.DocSidebarItem.collapseCategoryAriaLabel": "Collapse sidebar category '{label}'", - "theme.DocSidebarItem.expandCategoryAriaLabel": "Expand sidebar category '{label}'", - "theme.ErrorPageContent.title": "This page crashed.", - "theme.ErrorPageContent.tryAgain": "Try again", + "theme.CodeBlock.wordWrapToggle": "Alternar quebra de linha", + "theme.DocSidebarItem.collapseCategoryAriaLabel": "Fechar a categoria lateral '{label}'", + "theme.DocSidebarItem.expandCategoryAriaLabel": "Expandir a categoria lateral '{label}'", + "theme.ErrorPageContent.title": "Esta página deu erro.", + "theme.ErrorPageContent.tryAgain": "Tente novamente", "theme.NavBar.navAriaLabel": "Main", "theme.NotFound.p1": "Não foi possível encontrar o que você está procurando.", "theme.NotFound.p2": "Entre em contato com o proprietário do site que lhe trouxe para cá e lhe informe que o link está quebrado.", "theme.NotFound.title": "Página não encontrada", "theme.TOCCollapsible.toggleButtonLabel": "Nessa página", - "theme.admonition.caution": "caution", - "theme.admonition.danger": "danger", + "theme.admonition.caution": "cuidado", + "theme.admonition.danger": "perigo", "theme.admonition.info": "info", - "theme.admonition.note": "note", - "theme.admonition.tip": "tip", - "theme.admonition.warning": "warning", + "theme.admonition.note": "nota", + "theme.admonition.tip": "dica", + "theme.admonition.warning": "atenção", "theme.blog.archive.description": "Arquivo", "theme.blog.archive.title": "Arquivo", "theme.blog.paginator.navAriaLabel": "Navegação da página de listagem do blog", @@ -30,32 +30,32 @@ "theme.blog.post.paginator.olderPost": "Postagem mais antiga", "theme.blog.post.plurals": "Uma postagem|{count} postagens", "theme.blog.post.readMore": "Leia Mais", - "theme.blog.post.readMoreLabel": "Read more about {title}", + 
"theme.blog.post.readMoreLabel": "Ler mais sobre {title}", "theme.blog.post.readingTime.plurals": "Leitura de um minuto|Leitura de {readingTime} minutos", "theme.blog.sidebar.navAriaLabel": "Blog recent posts navigation", "theme.blog.tagTitle": "{nPosts} marcadas com \"{tagName}\"", - "theme.colorToggle.ariaLabel": "Switch between dark and light mode (currently {mode})", - "theme.colorToggle.ariaLabel.mode.dark": "dark mode", - "theme.colorToggle.ariaLabel.mode.light": "light mode", + "theme.colorToggle.ariaLabel": "Alterar entre os modos claro e escuro (modo {mode} ativado)", + "theme.colorToggle.ariaLabel.mode.dark": "modo escuro", + "theme.colorToggle.ariaLabel.mode.light": "modo claro", "theme.common.editThisPage": "Editar essa página", "theme.common.headingLinkTitle": "Link direto para {heading}", "theme.common.skipToMainContent": "Pular para o conteúdo principal", "theme.docs.DocCard.categoryDescription": "{count} items", - "theme.docs.breadcrumbs.home": "Home page", + "theme.docs.breadcrumbs.home": "Página Inicial", "theme.docs.breadcrumbs.navAriaLabel": "Breadcrumbs", "theme.docs.paginator.navAriaLabel": "Páginas de documentação", "theme.docs.paginator.next": "Próxima", "theme.docs.paginator.previous": "Anterior", - "theme.docs.sidebar.closeSidebarButtonAriaLabel": "Close navigation bar", + "theme.docs.sidebar.closeSidebarButtonAriaLabel": "Fechar barra de navegação", "theme.docs.sidebar.collapseButtonAriaLabel": "Fechar painel lateral", "theme.docs.sidebar.collapseButtonTitle": "Fechar painel lateral", "theme.docs.sidebar.expandButtonAriaLabel": "Expandir painel lateral", "theme.docs.sidebar.expandButtonTitle": "Expandir painel lateral", "theme.docs.sidebar.navAriaLabel": "Docs sidebar", - "theme.docs.sidebar.toggleSidebarButtonAriaLabel": "Toggle navigation bar", + "theme.docs.sidebar.toggleSidebarButtonAriaLabel": "Alternar a barra de navegação", "theme.docs.tagDocListPageTitle": "{nDocsTagged} com \"{tagName}\"", "theme.docs.tagDocListPageTitle.nDocsTagged": "Um documento selecionado|{count} documentos selecionados", - "theme.docs.versionBadge.label": "Version: {versionLabel}", + "theme.docs.versionBadge.label": "Versão: {versionLabel}", "theme.docs.versions.latestVersionLinkLabel": "última versão", "theme.docs.versions.latestVersionSuggestionLabel": "Para a documentação atualizada, veja: {latestVersionLink} ({versionLabel}).", "theme.docs.versions.unmaintainedVersionLabel": "Esta é a documentação para {siteTitle} {versionLabel}, que não é mais mantida ativamente.", @@ -63,12 +63,12 @@ "theme.lastUpdated.atDate": " em {date}", "theme.lastUpdated.byUser": " por {user}", "theme.lastUpdated.lastUpdatedAtBy": "Última atualização {atDate}{byUser}", - "theme.navbar.mobileLanguageDropdown.label": "Languages", + "theme.navbar.mobileLanguageDropdown.label": "Linguagens", "theme.navbar.mobileSidebarSecondaryMenu.backButtonLabel": "← Voltar para o menu principal", - "theme.navbar.mobileVersionsDropdown.label": "Versions", + "theme.navbar.mobileVersionsDropdown.label": "Versões", "theme.tags.tagsListLabel": "Marcadores:", "theme.tags.tagsPageLink": "Ver todas os Marcadores", "theme.tags.tagsPageTitle": "Marcadores", "theme.unlistedContent.message": "This page is unlisted. 
Search engines will not index it, and only users having a direct link can access it.", - "theme.unlistedContent.title": "Unlisted page" + "theme.unlistedContent.title": "Página não listada" } diff --git a/packages/docusaurus-theme-translations/package.json b/packages/docusaurus-theme-translations/package.json index 795bd3e964..14d099b24e 100644 --- a/packages/docusaurus-theme-translations/package.json +++ b/packages/docusaurus-theme-translations/package.json @@ -1,6 +1,6 @@ { "name": "@docusaurus/theme-translations", - "version": "3.0.1", + "version": "3.1.0", "description": "Docusaurus theme translations.", "main": "lib/index.js", "types": "lib/index.d.ts", @@ -23,8 +23,8 @@ "tslib": "^2.6.0" }, "devDependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/logger": "3.0.1", + "@docusaurus/core": "3.1.0", + "@docusaurus/logger": "3.1.0", "lodash": "^4.17.21" }, "engines": { diff --git a/packages/docusaurus-tsconfig/package.json b/packages/docusaurus-tsconfig/package.json index 3a716a9302..ae0132a236 100644 --- a/packages/docusaurus-tsconfig/package.json +++ b/packages/docusaurus-tsconfig/package.json @@ -1,6 +1,6 @@ { "name": "@docusaurus/tsconfig", - "version": "3.0.1", + "version": "3.1.0", "description": "Docusaurus base TypeScript configuration.", "main": "tsconfig.json", "publishConfig": { diff --git a/packages/docusaurus-types/package.json b/packages/docusaurus-types/package.json index 9de288df69..4c4e6944ea 100644 --- a/packages/docusaurus-types/package.json +++ b/packages/docusaurus-types/package.json @@ -1,6 +1,6 @@ { "name": "@docusaurus/types", - "version": "3.0.1", + "version": "3.1.0", "description": "Common types for Docusaurus packages.", "types": "./src/index.d.ts", "publishConfig": { @@ -13,6 +13,7 @@ }, "license": "MIT", "dependencies": { + "@mdx-js/mdx": "^3.0.0", "@types/history": "^4.7.11", "@types/react": "*", "commander": "^5.1.0", diff --git a/packages/docusaurus-types/src/config.d.ts b/packages/docusaurus-types/src/config.d.ts index 3a7bb99ae7..47bfde898d 100644 --- a/packages/docusaurus-types/src/config.d.ts +++ b/packages/docusaurus-types/src/config.d.ts @@ -10,6 +10,10 @@ import type {Required as RequireKeys, DeepPartial} from 'utility-types'; import type {I18nConfig} from './i18n'; import type {PluginConfig, PresetConfig, HtmlTagObject} from './plugin'; +import type {ProcessorOptions} from '@mdx-js/mdx'; + +export type RemarkRehypeOptions = ProcessorOptions['remarkRehypeOptions']; + export type ReportingSeverity = 'ignore' | 'log' | 'warn' | 'throw'; export type ThemeConfig = { @@ -27,6 +31,20 @@ export type MDX1CompatOptions = { headingIds: boolean; }; +export type ParseFrontMatterParams = {filePath: string; fileContent: string}; +export type ParseFrontMatterResult = { + frontMatter: {[key: string]: unknown}; + content: string; +}; +export type DefaultParseFrontMatter = ( + params: ParseFrontMatterParams, +) => Promise ; +export type ParseFrontMatter = ( + params: ParseFrontMatterParams & { + defaultParseFrontMatter: DefaultParseFrontMatter; + }, +) => Promise ; + export type MarkdownConfig = { /** * The Markdown format to use by default. @@ -44,6 +62,14 @@ export type MarkdownConfig = { */ format: 'mdx' | 'md' | 'detect'; + /** + * A function callback that lets users parse the front matter themselves. + * Gives the opportunity to read it from a different source, or process it. 
+ * + * @see https://github.com/facebook/docusaurus/issues/5568 + */ + parseFrontMatter: ParseFrontMatter; + /** * Allow mermaid language code blocks to be rendered into Mermaid diagrams: * @@ -69,6 +95,12 @@ export type MarkdownConfig = { * See also https://github.com/facebook/docusaurus/issues/4029 */ mdx1Compat: MDX1CompatOptions; + + /** + * Ability to provide custom remark-rehype options + * See also https://github.com/remarkjs/remark-rehype#options + */ + remarkRehypeOptions: RemarkRehypeOptions; }; /** @@ -143,6 +175,13 @@ export type DocusaurusConfig = { * @default "throw" */ onBrokenLinks: ReportingSeverity; + /** + * The behavior of Docusaurus when it detects any broken link. + * + * @see https://docusaurus.io/docs/api/docusaurus-config#onBrokenAnchors + * @default "warn" + */ + onBrokenAnchors: ReportingSeverity; /** * The behavior of Docusaurus when it detects any broken markdown link. * diff --git a/packages/docusaurus-types/src/index.d.ts b/packages/docusaurus-types/src/index.d.ts index 53e83ce963..257ec57811 100644 --- a/packages/docusaurus-types/src/index.d.ts +++ b/packages/docusaurus-types/src/index.d.ts @@ -9,6 +9,8 @@ export { ReportingSeverity, ThemeConfig, MarkdownConfig, + DefaultParseFrontMatter, + ParseFrontMatter, DocusaurusConfig, Config, } from './config'; diff --git a/packages/docusaurus-utils-common/package.json b/packages/docusaurus-utils-common/package.json index 73c75cd249..4b453ad410 100644 --- a/packages/docusaurus-utils-common/package.json +++ b/packages/docusaurus-utils-common/package.json @@ -1,6 +1,6 @@ { "name": "@docusaurus/utils-common", - "version": "3.0.1", + "version": "3.1.0", "description": "Common (Node/Browser) utility functions for Docusaurus packages.", "main": "./lib/index.js", "types": "./lib/index.d.ts", diff --git a/packages/docusaurus-utils-validation/package.json b/packages/docusaurus-utils-validation/package.json index 8b2c4966b5..d60487f6d7 100644 --- a/packages/docusaurus-utils-validation/package.json +++ b/packages/docusaurus-utils-validation/package.json @@ -1,6 +1,6 @@ { "name": "@docusaurus/utils-validation", - "version": "3.0.1", + "version": "3.1.0", "description": "Node validation utility functions for Docusaurus packages.", "main": "./lib/index.js", "types": "./lib/index.d.ts", @@ -18,8 +18,8 @@ }, "license": "MIT", "dependencies": { - "@docusaurus/logger": "3.0.1", - "@docusaurus/utils": "3.0.1", + "@docusaurus/logger": "3.1.0", + "@docusaurus/utils": "3.1.0", "joi": "^17.9.2", "js-yaml": "^4.1.0", "tslib": "^2.6.0" diff --git a/packages/docusaurus-utils/package.json b/packages/docusaurus-utils/package.json index 5d716f418f..c1c49b4a61 100644 --- a/packages/docusaurus-utils/package.json +++ b/packages/docusaurus-utils/package.json @@ -1,6 +1,6 @@ { "name": "@docusaurus/utils", - "version": "3.0.1", + "version": "3.1.0", "description": "Node utility functions for Docusaurus packages.", "main": "./lib/index.js", "types": "./lib/index.d.ts", @@ -18,7 +18,7 @@ }, "license": "MIT", "dependencies": { - "@docusaurus/logger": "3.0.1", + "@docusaurus/logger": "3.1.0", "@svgr/webpack": "^6.5.1", "escape-string-regexp": "^4.0.0", "file-loader": "^6.2.0", @@ -40,7 +40,7 @@ "node": ">=18.0" }, "devDependencies": { - "@docusaurus/types": "3.0.1", + "@docusaurus/types": "3.1.0", "@types/dedent": "^0.7.0", "@types/github-slugger": "^1.3.0", "@types/micromatch": "^4.0.2", diff --git a/packages/docusaurus-utils/src/__tests__/__snapshots__/markdownLinks.test.ts.snap 
b/packages/docusaurus-utils/src/__tests__/__snapshots__/markdownLinks.test.ts.snap index 7c5fc87428..3c4f732a82 100644 --- a/packages/docusaurus-utils/src/__tests__/__snapshots__/markdownLinks.test.ts.snap +++ b/packages/docusaurus-utils/src/__tests__/__snapshots__/markdownLinks.test.ts.snap @@ -176,6 +176,7 @@ exports[`replaceMarkdownLinks replaces links with same title as URL 1`] = ` "brokenMarkdownLinks": [], "newContent": " [foo.md](/docs/foo) +[./foo.md]() [./foo.md](/docs/foo) [foo.md](/docs/foo) [./foo.md](/docs/foo) diff --git a/packages/docusaurus-utils/src/__tests__/__snapshots__/markdownUtils.test.ts.snap b/packages/docusaurus-utils/src/__tests__/__snapshots__/markdownUtils.test.ts.snap index 5a65f0bb82..8fb7a03dfa 100644 --- a/packages/docusaurus-utils/src/__tests__/__snapshots__/markdownUtils.test.ts.snap +++ b/packages/docusaurus-utils/src/__tests__/__snapshots__/markdownUtils.test.ts.snap @@ -1,6 +1,6 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`parseMarkdownString deletes only first heading 1`] = ` +exports[`parseMarkdownFile deletes only first heading 1`] = ` { "content": "# Markdown Title @@ -15,7 +15,7 @@ test test test # test bar } `; -exports[`parseMarkdownString deletes only first heading 2 1`] = ` +exports[`parseMarkdownFile deletes only first heading 2 1`] = ` { "content": "# test @@ -30,7 +30,7 @@ test3", } `; -exports[`parseMarkdownString does not warn for duplicate title if markdown title is not at the top 1`] = ` +exports[`parseMarkdownFile does not warn for duplicate title if markdown title is not at the top 1`] = ` { "content": "foo @@ -43,7 +43,7 @@ exports[`parseMarkdownString does not warn for duplicate title if markdown title } `; -exports[`parseMarkdownString handles code blocks 1`] = ` +exports[`parseMarkdownFile handles code blocks 1`] = ` { "content": "\`\`\`js code @@ -56,7 +56,7 @@ Content", } `; -exports[`parseMarkdownString handles code blocks 2`] = ` +exports[`parseMarkdownFile handles code blocks 2`] = ` { "content": "\`\`\`\`js Foo @@ -73,7 +73,7 @@ Content", } `; -exports[`parseMarkdownString handles code blocks 3`] = ` +exports[`parseMarkdownFile handles code blocks 3`] = ` { "content": "\`\`\`\`js Foo @@ -88,7 +88,7 @@ Content", } `; -exports[`parseMarkdownString ignores markdown title if its not a first text 1`] = ` +exports[`parseMarkdownFile ignores markdown title if its not a first text 1`] = ` { "content": "foo # test", @@ -98,7 +98,21 @@ exports[`parseMarkdownString ignores markdown title if its not a first text 1`] } `; -exports[`parseMarkdownString parse markdown with front matter 1`] = ` +exports[`parseMarkdownFile parse markdown with custom front matter parser 1`] = ` +{ + "content": "Some text", + "contentTitle": undefined, + "excerpt": "Some text", + "frontMatter": { + "age": 84, + "extra": "value", + "great": true, + "title": "Frontmatter title", + }, +} +`; + +exports[`parseMarkdownFile parse markdown with front matter 1`] = ` { "content": "Some text", "contentTitle": undefined, @@ -109,7 +123,7 @@ exports[`parseMarkdownString parse markdown with front matter 1`] = ` } `; -exports[`parseMarkdownString parses first heading as contentTitle 1`] = ` +exports[`parseMarkdownFile parses first heading as contentTitle 1`] = ` { "content": "# Markdown Title @@ -120,7 +134,7 @@ Some text", } `; -exports[`parseMarkdownString parses front-matter and ignore h2 1`] = ` +exports[`parseMarkdownFile parses front-matter and ignore h2 1`] = ` { "content": "## test", "contentTitle": undefined, @@ -131,7 +145,7 @@ 
exports[`parseMarkdownString parses front-matter and ignore h2 1`] = ` } `; -exports[`parseMarkdownString parses title only 1`] = ` +exports[`parseMarkdownFile parses title only 1`] = ` { "content": "# test", "contentTitle": "test", @@ -140,7 +154,7 @@ exports[`parseMarkdownString parses title only 1`] = ` } `; -exports[`parseMarkdownString parses title only alternate 1`] = ` +exports[`parseMarkdownFile parses title only alternate 1`] = ` { "content": "test ===", @@ -150,7 +164,7 @@ exports[`parseMarkdownString parses title only alternate 1`] = ` } `; -exports[`parseMarkdownString reads front matter only 1`] = ` +exports[`parseMarkdownFile reads front matter only 1`] = ` { "content": "", "contentTitle": undefined, @@ -161,7 +175,7 @@ exports[`parseMarkdownString reads front matter only 1`] = ` } `; -exports[`parseMarkdownString warns about duplicate titles (front matter + markdown alternate) 1`] = ` +exports[`parseMarkdownFile warns about duplicate titles (front matter + markdown alternate) 1`] = ` { "content": "Markdown Title alternate ================ @@ -175,7 +189,7 @@ Some text", } `; -exports[`parseMarkdownString warns about duplicate titles (front matter + markdown) 1`] = ` +exports[`parseMarkdownFile warns about duplicate titles (front matter + markdown) 1`] = ` { "content": "# Markdown Title @@ -188,7 +202,7 @@ Some text", } `; -exports[`parseMarkdownString warns about duplicate titles 1`] = ` +exports[`parseMarkdownFile warns about duplicate titles 1`] = ` { "content": "# test", "contentTitle": "test", diff --git a/packages/docusaurus-utils/src/__tests__/markdownLinks.test.ts b/packages/docusaurus-utils/src/__tests__/markdownLinks.test.ts index b7d7abd556..ce0acfb3a9 100644 --- a/packages/docusaurus-utils/src/__tests__/markdownLinks.test.ts +++ b/packages/docusaurus-utils/src/__tests__/markdownLinks.test.ts @@ -231,6 +231,7 @@ The following operations are defined for [URI]s: }, fileString: ` [foo.md](foo.md) +[./foo.md](<./foo.md>) [./foo.md](./foo.md) [foo.md](./foo.md) [./foo.md](foo.md) diff --git a/packages/docusaurus-utils/src/__tests__/markdownUtils.test.ts b/packages/docusaurus-utils/src/__tests__/markdownUtils.test.ts index 182c95b05f..0e04dbf5c2 100644 --- a/packages/docusaurus-utils/src/__tests__/markdownUtils.test.ts +++ b/packages/docusaurus-utils/src/__tests__/markdownUtils.test.ts @@ -9,12 +9,14 @@ import dedent from 'dedent'; import { createExcerpt, parseMarkdownContentTitle, - parseMarkdownString, parseMarkdownHeadingId, writeMarkdownHeadingId, escapeMarkdownHeadingIds, unwrapMdxCodeBlocks, admonitionTitleToDirectiveLabel, + parseMarkdownFile, + DEFAULT_PARSE_FRONT_MATTER, + parseFileContentFrontMatter, } from '../markdownUtils'; describe('createExcerpt', () => { @@ -623,32 +625,110 @@ Lorem Ipsum }); }); -describe('parseMarkdownString', () => { - it('parse markdown with front matter', () => { - expect( - parseMarkdownString(dedent` +describe('parseFileContentFrontMatter', () => { + function test(fileContent: string) { + return parseFileContentFrontMatter(fileContent); + } + + it('can parse front matter', () => { + const input = dedent` + --- + title: Frontmatter title + author: + age: 42 + birth: 2000-07-23 + --- + + Some text + `; + + const expectedResult = { + content: 'Some text', + frontMatter: { + title: 'Frontmatter title', + author: {age: 42, birth: new Date('2000-07-23')}, + }, + }; + + const result = test(input) as typeof expectedResult; + expect(result).toEqual(expectedResult); + expect(result.frontMatter.author.birth).toBeInstanceOf(Date); + + // A 
regression test, ensure we don't return gray-matter cached objects + result.frontMatter.title = 'modified'; + // @ts-expect-error: ok + result.frontMatter.author.age = 53; + expect(test(input)).toEqual(expectedResult); + }); +}); + +describe('parseMarkdownFile', () => { + async function test( + fileContent: string, + options?: Partial >[0], + ) { + return parseMarkdownFile({ + fileContent, + filePath: 'some-file-path.mdx', + parseFrontMatter: DEFAULT_PARSE_FRONT_MATTER, + ...options, + }); + } + + it('parse markdown with front matter', async () => { + await expect( + test(dedent` --- title: Frontmatter title --- Some text `), - ).toMatchSnapshot(); + ).resolves.toMatchSnapshot(); }); - it('parses first heading as contentTitle', () => { - expect( - parseMarkdownString(dedent` + it('parse markdown with custom front matter parser', async () => { + await expect( + test( + dedent` + --- + title: Frontmatter title + age: 42 + --- + + Some text + `, + { + parseFrontMatter: async (params) => { + const result = await params.defaultParseFrontMatter(params); + return { + ...result, + frontMatter: { + ...result.frontMatter, + age: result.frontMatter.age * 2, + extra: 'value', + great: true, + }, + }; + }, + }, + ), + ).resolves.toMatchSnapshot(); + }); + + it('parses first heading as contentTitle', async () => { + await expect( + test(dedent` # Markdown Title Some text `), - ).toMatchSnapshot(); + ).resolves.toMatchSnapshot(); }); - it('warns about duplicate titles (front matter + markdown)', () => { - expect( - parseMarkdownString(dedent` + it('warns about duplicate titles (front matter + markdown)', async () => { + await expect( + test(dedent` --- title: Frontmatter title --- @@ -657,12 +737,12 @@ describe('parseMarkdownString', () => { Some text `), - ).toMatchSnapshot(); + ).resolves.toMatchSnapshot(); }); - it('warns about duplicate titles (front matter + markdown alternate)', () => { - expect( - parseMarkdownString(dedent` + it('warns about duplicate titles (front matter + markdown alternate)', async () => { + await expect( + test(dedent` --- title: Frontmatter title --- @@ -672,12 +752,12 @@ describe('parseMarkdownString', () => { Some text `), - ).toMatchSnapshot(); + ).resolves.toMatchSnapshot(); }); - it('does not warn for duplicate title if markdown title is not at the top', () => { - expect( - parseMarkdownString(dedent` + it('does not warn for duplicate title if markdown title is not at the top', async () => { + await expect( + test(dedent` --- title: Frontmatter title --- @@ -686,12 +766,12 @@ describe('parseMarkdownString', () => { # Markdown Title `), - ).toMatchSnapshot(); + ).resolves.toMatchSnapshot(); }); - it('deletes only first heading', () => { - expect( - parseMarkdownString(dedent` + it('deletes only first heading', async () => { + await expect( + test(dedent` # Markdown Title test test test # test bar @@ -700,12 +780,12 @@ describe('parseMarkdownString', () => { ### Markdown Title h3 `), - ).toMatchSnapshot(); + ).resolves.toMatchSnapshot(); }); - it('parses front-matter and ignore h2', () => { - expect( - parseMarkdownString( + it('parses front-matter and ignore h2', async () => { + await expect( + test( dedent` --- title: Frontmatter title @@ -713,55 +793,55 @@ describe('parseMarkdownString', () => { ## test `, ), - ).toMatchSnapshot(); + ).resolves.toMatchSnapshot(); }); - it('reads front matter only', () => { - expect( - parseMarkdownString(dedent` + it('reads front matter only', async () => { + await expect( + test(dedent` --- title: test --- `), - ).toMatchSnapshot(); + 
).resolves.toMatchSnapshot(); }); - it('parses title only', () => { - expect(parseMarkdownString('# test')).toMatchSnapshot(); + it('parses title only', async () => { + await expect(test('# test')).resolves.toMatchSnapshot(); }); - it('parses title only alternate', () => { - expect( - parseMarkdownString(dedent` + it('parses title only alternate', async () => { + await expect( + test(dedent` test === `), - ).toMatchSnapshot(); + ).resolves.toMatchSnapshot(); }); - it('warns about duplicate titles', () => { - expect( - parseMarkdownString(dedent` + it('warns about duplicate titles', async () => { + await expect( + test(dedent` --- title: Frontmatter title --- # test `), - ).toMatchSnapshot(); + ).resolves.toMatchSnapshot(); }); - it('ignores markdown title if its not a first text', () => { - expect( - parseMarkdownString(dedent` + it('ignores markdown title if its not a first text', async () => { + await expect( + test(dedent` foo # test `), - ).toMatchSnapshot(); + ).resolves.toMatchSnapshot(); }); - it('deletes only first heading 2', () => { - expect( - parseMarkdownString(dedent` + it('deletes only first heading 2', async () => { + await expect( + test(dedent` # test test test test test test test @@ -770,21 +850,21 @@ describe('parseMarkdownString', () => { ### test test3 `), - ).toMatchSnapshot(); + ).resolves.toMatchSnapshot(); }); - it('handles code blocks', () => { - expect( - parseMarkdownString(dedent` + it('handles code blocks', async () => { + await expect( + test(dedent` \`\`\`js code \`\`\` Content `), - ).toMatchSnapshot(); - expect( - parseMarkdownString(dedent` + ).resolves.toMatchSnapshot(); + await expect( + test(dedent` \`\`\`\`js Foo \`\`\`diff @@ -795,9 +875,9 @@ describe('parseMarkdownString', () => { Content `), - ).toMatchSnapshot(); - expect( - parseMarkdownString(dedent` + ).resolves.toMatchSnapshot(); + await expect( + test(dedent` \`\`\`\`js Foo \`\`\`diff @@ -806,17 +886,17 @@ describe('parseMarkdownString', () => { Content `), - ).toMatchSnapshot(); + ).resolves.toMatchSnapshot(); }); - it('throws for invalid front matter', () => { - expect(() => - parseMarkdownString(dedent` + it('throws for invalid front matter', async () => { + await expect( + test(dedent` --- foo: f: a --- `), - ).toThrowErrorMatchingInlineSnapshot(` + ).rejects.toThrowErrorMatchingInlineSnapshot(` "incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line at line 2, column 7: foo: f: a ^" diff --git a/packages/docusaurus-utils/src/__tests__/urlUtils.test.ts b/packages/docusaurus-utils/src/__tests__/urlUtils.test.ts index 30625e4005..301a91ae32 100644 --- a/packages/docusaurus-utils/src/__tests__/urlUtils.test.ts +++ b/packages/docusaurus-utils/src/__tests__/urlUtils.test.ts @@ -18,6 +18,8 @@ import { buildSshUrl, buildHttpsUrl, hasSSHProtocol, + parseURLPath, + serializeURLPath, } from '../urlUtils'; describe('normalizeUrl', () => { @@ -232,6 +234,137 @@ describe('removeTrailingSlash', () => { }); }); +describe('parseURLPath', () => { + it('parse and resolve pathname', () => { + expect(parseURLPath('')).toEqual({ + pathname: '/', + search: undefined, + hash: undefined, + }); + expect(parseURLPath('/')).toEqual({ + pathname: '/', + search: undefined, + hash: undefined, + }); + expect(parseURLPath('/page')).toEqual({ + pathname: '/page', + search: undefined, + hash: undefined, + }); + expect(parseURLPath('/dir1/page')).toEqual({ + pathname: '/dir1/page', + search: undefined, + hash: undefined, + }); + 
expect(parseURLPath('/dir1/dir2/./../page')).toEqual({ + pathname: '/dir1/page', + search: undefined, + hash: undefined, + }); + expect(parseURLPath('/dir1/dir2/../..')).toEqual({ + pathname: '/', + search: undefined, + hash: undefined, + }); + expect(parseURLPath('/dir1/dir2/../../..')).toEqual({ + pathname: '/', + search: undefined, + hash: undefined, + }); + expect(parseURLPath('./dir1/dir2./../page', '/dir3/dir4/page2')).toEqual({ + pathname: '/dir3/dir4/dir1/page', + search: undefined, + hash: undefined, + }); + }); + + it('parse query string', () => { + expect(parseURLPath('/page')).toEqual({ + pathname: '/page', + search: undefined, + hash: undefined, + }); + expect(parseURLPath('/page?')).toEqual({ + pathname: '/page', + search: '', + hash: undefined, + }); + expect(parseURLPath('/page?test')).toEqual({ + pathname: '/page', + search: 'test', + hash: undefined, + }); + expect(parseURLPath('/page?age=42&great=true')).toEqual({ + pathname: '/page', + search: 'age=42&great=true', + hash: undefined, + }); + }); + + it('parse hash', () => { + expect(parseURLPath('/page')).toEqual({ + pathname: '/page', + search: undefined, + hash: undefined, + }); + expect(parseURLPath('/page#')).toEqual({ + pathname: '/page', + search: undefined, + hash: '', + }); + expect(parseURLPath('/page#anchor')).toEqual({ + pathname: '/page', + search: undefined, + hash: 'anchor', + }); + }); + + it('parse fancy real-world edge cases', () => { + expect(parseURLPath('/page?#')).toEqual({ + pathname: '/page', + search: '', + hash: '', + }); + expect( + parseURLPath('dir1/dir2/../page?age=42#anchor', '/dir3/page2'), + ).toEqual({ + pathname: '/dir3/dir1/page', + search: 'age=42', + hash: 'anchor', + }); + }); +}); + +describe('serializeURLPath', () => { + function test(input: string, base?: string, expectedOutput?: string) { + expect(serializeURLPath(parseURLPath(input, base))).toEqual( + expectedOutput ?? input, + ); + } + + it('works for already resolved paths', () => { + test('/'); + test('/dir1/page'); + test('/dir1/page?'); + test('/dir1/page#'); + test('/dir1/page?#'); + test('/dir1/page?age=42#anchor'); + }); + + it('works for relative paths', () => { + test('', undefined, '/'); + test('', '/dir1/dir2/page2', '/dir1/dir2/page2'); + test('page', '/dir1/dir2/page2', '/dir1/dir2/page'); + test('../page', '/dir1/dir2/page2', '/dir1/page'); + test('/dir1/dir2/../page', undefined, '/dir1/page'); + test( + '/dir1/dir2/../page?age=42#anchor', + undefined, + '/dir1/page?age=42#anchor', + ); + }); +}); + describe('resolvePathname', () => { it('works', () => { // These tests are directly copied from https://github.com/mjackson/resolve-pathname/blob/master/modules/__tests__/resolvePathname-test.js diff --git a/packages/docusaurus-utils/src/constants.ts b/packages/docusaurus-utils/src/constants.ts index 6ecba7aea0..5039d6989b 100644 --- a/packages/docusaurus-utils/src/constants.ts +++ b/packages/docusaurus-utils/src/constants.ts @@ -83,7 +83,9 @@ export const DEFAULT_I18N_DIR_NAME = 'i18n'; export const CODE_TRANSLATIONS_FILE_NAME = 'code.json'; /** Dev server opens on this port by default. */ -export const DEFAULT_PORT = 3000; +export const DEFAULT_PORT = process.env.PORT + ? parseInt(process.env.PORT, 10) + : 3000; /** Default plugin ID. 
*/ export const DEFAULT_PLUGIN_ID = 'default'; diff --git a/packages/docusaurus-utils/src/index.ts b/packages/docusaurus-utils/src/index.ts index 5bb77a0c05..6db01244d0 100644 --- a/packages/docusaurus-utils/src/index.ts +++ b/packages/docusaurus-utils/src/index.ts @@ -48,6 +48,8 @@ export { encodePath, isValidPathname, resolvePathname, + parseURLPath, + serializeURLPath, addLeadingSlash, addTrailingSlash, removeTrailingSlash, @@ -55,6 +57,7 @@ export { buildHttpsUrl, buildSshUrl, } from './urlUtils'; +export type {URLPath} from './urlUtils'; export { type Tag, type TagsListItem, @@ -70,9 +73,9 @@ export { unwrapMdxCodeBlocks, admonitionTitleToDirectiveLabel, createExcerpt, - parseFrontMatter, + DEFAULT_PARSE_FRONT_MATTER, parseMarkdownContentTitle, - parseMarkdownString, + parseMarkdownFile, writeMarkdownHeadingId, type WriteHeadingIDOptions, } from './markdownUtils'; diff --git a/packages/docusaurus-utils/src/markdownLinks.ts b/packages/docusaurus-utils/src/markdownLinks.ts index 88825e502f..13afca3390 100644 --- a/packages/docusaurus-utils/src/markdownLinks.ts +++ b/packages/docusaurus-utils/src/markdownLinks.ts @@ -128,7 +128,7 @@ export function replaceMarkdownLinks ({ const linkSuffixPattern = '(?:\\?[^#>\\s]+)?(?:#[^>\\s]+)?'; const linkCapture = (forbidden: string) => `((?!https?://|@site/)[^${forbidden}#?]+)`; - const linkURLPattern = `(?:${linkCapture( + const linkURLPattern = `(?:(?!<)${linkCapture( '()\\s', )}${linkSuffixPattern}|<${linkCapture('>')}${linkSuffixPattern}>)`; const linkPattern = new RegExp( diff --git a/packages/docusaurus-utils/src/markdownUtils.ts b/packages/docusaurus-utils/src/markdownUtils.ts index a2ca3db101..87aac88f09 100644 --- a/packages/docusaurus-utils/src/markdownUtils.ts +++ b/packages/docusaurus-utils/src/markdownUtils.ts @@ -8,6 +8,10 @@ import logger from '@docusaurus/logger'; import matter from 'gray-matter'; import {createSlugger, type Slugger, type SluggerOptions} from './slugger'; +import type { + ParseFrontMatter, + DefaultParseFrontMatter, +} from '@docusaurus/types'; // Some utilities for parsing Markdown content. These things are only used on // server-side when we infer metadata like `title` and `description` from the @@ -214,19 +218,40 @@ export function createExcerpt(fileString: string): string | undefined { * --- * ``` */ -export function parseFrontMatter(markdownFileContent: string): { +export function parseFileContentFrontMatter(fileContent: string): { /** Front matter as parsed by gray-matter. */ frontMatter: {[key: string]: unknown}; /** The remaining content, trimmed. */ content: string; } { - const {data, content} = matter(markdownFileContent); + // TODO Docusaurus v4: replace gray-matter by a better lib + // gray-matter is unmaintained, not flexible, and the code doesn't look good + const {data, content} = matter(fileContent); + + // gray-matter has an undocumented front matter caching behavior + // https://github.com/jonschlinkert/gray-matter/blob/ce67a86dba419381db0dd01cc84e2d30a1d1e6a5/index.js#L39 + // Unfortunately, this becomes a problem when we mutate returned front matter + // We want to make it possible as part of the parseFrontMatter API + // So we make it safe to mutate by always providing a deep copy + const frontMatter = + // And of course structuredClone() doesn't work well with Date in Jest... + // See https://github.com/jestjs/jest/issues/2549 + // So we parse again for tests with a {} option object + // This undocumented empty option object disables gray-matter caching.. + process.env.JEST_WORKER_ID + ? 
matter(fileContent, {}).data + : structuredClone(data); + return { - frontMatter: data, + frontMatter, content: content.trim(), }; } +export const DEFAULT_PARSE_FRONT_MATTER: DefaultParseFrontMatter = async ( + params, +) => parseFileContentFrontMatter(params.fileContent); + function toTextContentTitle(contentTitle: string): string { return contentTitle.replace(/`(? [^`]*)`/g, '$ '); } @@ -309,10 +334,16 @@ export function parseMarkdownContentTitle( * @throws Throws when `parseFrontMatter` throws, usually because of invalid * syntax. */ -export function parseMarkdownString( - markdownFileContent: string, - options?: ParseMarkdownContentTitleOptions, -): { +export async function parseMarkdownFile({ + filePath, + fileContent, + parseFrontMatter, + removeContentTitle, +}: { + filePath: string; + fileContent: string; + parseFrontMatter: ParseFrontMatter; +} & ParseMarkdownContentTitleOptions): Promise<{ /** @see {@link parseFrontMatter} */ frontMatter: {[key: string]: unknown}; /** @see {@link parseMarkdownContentTitle} */ @@ -324,14 +355,18 @@ export function parseMarkdownString( * the `removeContentTitle` option. */ content: string; -} { +}> { try { const {frontMatter, content: contentWithoutFrontMatter} = - parseFrontMatter(markdownFileContent); + await parseFrontMatter({ + filePath, + fileContent, + defaultParseFrontMatter: DEFAULT_PARSE_FRONT_MATTER, + }); const {content, contentTitle} = parseMarkdownContentTitle( contentWithoutFrontMatter, - options, + {removeContentTitle}, ); const excerpt = createExcerpt(content); diff --git a/packages/docusaurus-utils/src/urlUtils.ts b/packages/docusaurus-utils/src/urlUtils.ts index bb901a291d..8a7af4aa4b 100644 --- a/packages/docusaurus-utils/src/urlUtils.ts +++ b/packages/docusaurus-utils/src/urlUtils.ts @@ -165,14 +165,73 @@ export function isValidPathname(str: string): boolean { } } +export type URLPath = {pathname: string; search?: string; hash?: string}; + +// Let's name the concept of (pathname + search + hash) as URLPath +// See also https://twitter.com/kettanaito/status/1741768992866308120 +// Note: this function also resolves relative pathnames while parsing! +export function parseURLPath(urlPath: string, fromPath?: string): URLPath { + function parseURL(url: string, base?: string | URL): URL { + try { + // A possible alternative? https://github.com/unjs/ufo#url + return new URL(url, base ?? 'https://example.com'); + } catch (e) { + throw new Error( + `Can't parse URL ${url}${base ? ` with base ${base}` : ''}`, + {cause: e}, + ); + } + } + + const base = fromPath ? parseURL(fromPath) : undefined; + const url = parseURL(urlPath, base); + + const {pathname} = url; + + // Fixes annoying url.search behavior + // "" => undefined + // "?" => "" + // "?param => "param" + const search = url.search + ? url.search.slice(1) + : urlPath.includes('?') + ? '' + : undefined; + + // Fixes annoying url.hash behavior + // "" => undefined + // "#" => "" + // "?param => "param" + const hash = url.hash + ? url.hash.slice(1) + : urlPath.includes('#') + ? '' + : undefined; + + return { + pathname, + search, + hash, + }; +} + +export function serializeURLPath(urlPath: URLPath): string { + const search = urlPath.search === undefined ? '' : `?${urlPath.search}`; + const hash = urlPath.hash === undefined ? '' : `#${urlPath.hash}`; + return `${urlPath.pathname}${search}${hash}`; +} + /** * Resolve pathnames and fail-fast if resolution fails. 
Uses standard URL * semantics (provided by `resolve-pathname` which is used internally by React * router) */ export function resolvePathname(to: string, from?: string): string { + // TODO do we really need resolve-pathname lib anymore? + // possible alternative: decodeURI(parseURLPath(to, from).pathname); return resolvePathnameUnsafe(to, from); } + /** Appends a leading slash to `str`, if one doesn't exist. */ export function addLeadingSlash(str: string): string { return addPrefix(str, '/'); diff --git a/packages/docusaurus/bin/docusaurus.mjs b/packages/docusaurus/bin/docusaurus.mjs index a2738c3866..b65915bcd6 100755 --- a/packages/docusaurus/bin/docusaurus.mjs +++ b/packages/docusaurus/bin/docusaurus.mjs @@ -218,6 +218,9 @@ cli.arguments(' ').action((cmd) => { logger.error` Unknown command name=${cmd}.`; }); +// === The above is the commander configuration === +// They don't start any code execution yet until cli.parse() is called below + /** * @param {string | undefined} command */ @@ -237,12 +240,29 @@ function isInternalCommand(command) { ); } -if (!isInternalCommand(process.argv.slice(2)[0])) { - await externalCommand(cli); +// process.argv always looks like this: +// [ +// '/path/to/node', +// '/path/to/docusaurus.mjs', +// ' ', +// ...subcommandArgs +// ] + +// There is no subcommand +// TODO: can we use commander to handle this case? +if (process.argv.length < 3 || process.argv[2]?.startsWith('--')) { + cli.outputHelp(); + process.exit(1); } -if (!process.argv.slice(2).length) { - cli.outputHelp(); +// There is an unrecognized subcommand +// Let plugins extend the CLI before parsing +if (!isInternalCommand(process.argv[2])) { + // TODO: in this step, we must assume default site structure because there's + // no way to know the siteDir/config yet. Maybe the root cli should be + // responsible for parsing these arguments? 
+ // https://github.com/facebook/docusaurus/issues/8903 + await externalCommand(cli); } cli.parse(process.argv); diff --git a/packages/docusaurus/package.json b/packages/docusaurus/package.json index 503faa3400..1edbc79b9e 100644 --- a/packages/docusaurus/package.json +++ b/packages/docusaurus/package.json @@ -1,7 +1,7 @@ { "name": "@docusaurus/core", "description": "Easy to Maintain Open Source Documentation Websites", - "version": "3.0.1", + "version": "3.1.0", "license": "MIT", "publishConfig": { "access": "public" @@ -43,13 +43,13 @@ "@babel/runtime": "^7.22.6", "@babel/runtime-corejs3": "^7.22.6", "@babel/traverse": "^7.22.8", - "@docusaurus/cssnano-preset": "3.0.1", - "@docusaurus/logger": "3.0.1", - "@docusaurus/mdx-loader": "3.0.1", + "@docusaurus/cssnano-preset": "3.1.0", + "@docusaurus/logger": "3.1.0", + "@docusaurus/mdx-loader": "3.1.0", "@docusaurus/react-loadable": "5.5.2", - "@docusaurus/utils": "3.0.1", - "@docusaurus/utils-common": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/utils": "3.1.0", + "@docusaurus/utils-common": "3.1.0", + "@docusaurus/utils-validation": "3.1.0", "@slorber/static-site-generator-webpack-plugin": "^4.0.7", "@svgr/webpack": "^6.5.1", "autoprefixer": "^10.4.14", @@ -104,8 +104,8 @@ "webpackbar": "^5.0.2" }, "devDependencies": { - "@docusaurus/module-type-aliases": "3.0.1", - "@docusaurus/types": "3.0.1", + "@docusaurus/module-type-aliases": "3.1.0", + "@docusaurus/types": "3.1.0", "@types/detect-port": "^1.3.3", "@types/react-dom": "^18.2.7", "@types/react-router-config": "^5.0.7", diff --git a/packages/docusaurus/src/client/BrokenLinksContext.tsx b/packages/docusaurus/src/client/BrokenLinksContext.tsx new file mode 100644 index 0000000000..e04e8ab147 --- /dev/null +++ b/packages/docusaurus/src/client/BrokenLinksContext.tsx @@ -0,0 +1,51 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import React, {type ReactNode, useContext} from 'react'; +import type {BrokenLinks} from '@docusaurus/useBrokenLinks'; + +export type StatefulBrokenLinks = BrokenLinks & { + getCollectedLinks: () => string[]; + getCollectedAnchors: () => string[]; +}; + +export const createStatefulBrokenLinks = (): StatefulBrokenLinks => { + // Set to dedup, as it's not useful to collect multiple times the same value + const allAnchors = new Set (); + const allLinks = new Set (); + return { + collectAnchor: (anchor: string): void => { + allAnchors.add(anchor); + }, + collectLink: (link: string): void => { + allLinks.add(link); + }, + getCollectedAnchors: (): string[] => [...allAnchors], + getCollectedLinks: (): string[] => [...allLinks], + }; +}; + +const Context = React.createContext ({ + collectAnchor: () => { + // No-op for client + }, + collectLink: () => { + // No-op for client + }, +}); + +export const useBrokenLinksContext = (): BrokenLinks => useContext(Context); + +export function BrokenLinksProvider({ + children, + brokenLinks, +}: { + children: ReactNode; + brokenLinks: BrokenLinks; +}): JSX.Element { + return {children} ; +} diff --git a/packages/docusaurus/src/client/LinksCollector.tsx b/packages/docusaurus/src/client/LinksCollector.tsx deleted file mode 100644 index d0fb33b9ec..0000000000 --- a/packages/docusaurus/src/client/LinksCollector.tsx +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Copyright (c) Facebook, Inc. and its affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -import React, {type ReactNode, useContext} from 'react'; - -type LinksCollector = { - collectLink: (link: string) => void; -}; - -type StatefulLinksCollector = LinksCollector & { - getCollectedLinks: () => string[]; -}; - -export const createStatefulLinksCollector = (): StatefulLinksCollector => { - // Set to dedup, as it's not useful to collect multiple times the same link - const allLinks = new Set(); - return { - collectLink: (link: string): void => { - allLinks.add(link); - }, - getCollectedLinks: (): string[] => [...allLinks], - }; -}; - -const Context = React.createContext ({ - collectLink: () => { - // No-op for client. We only use the broken links checker server-side. - }, -}); - -export const useLinksCollector = (): LinksCollector => useContext(Context); - -export function LinksCollectorProvider({ - children, - linksCollector, -}: { - children: ReactNode; - linksCollector: LinksCollector; -}): JSX.Element { - return {children} ; -} diff --git a/packages/docusaurus/src/client/exports/Link.tsx b/packages/docusaurus/src/client/exports/Link.tsx index 4a7453dfef..8b886c8e70 100644 --- a/packages/docusaurus/src/client/exports/Link.tsx +++ b/packages/docusaurus/src/client/exports/Link.tsx @@ -16,7 +16,7 @@ import {applyTrailingSlash} from '@docusaurus/utils-common'; import useDocusaurusContext from './useDocusaurusContext'; import isInternalUrl from './isInternalUrl'; import ExecutionEnvironment from './ExecutionEnvironment'; -import {useLinksCollector} from '../LinksCollector'; +import useBrokenLinks from './useBrokenLinks'; import {useBaseUrlUtils} from './useBaseUrl'; import type {Props} from '@docusaurus/Link'; @@ -44,7 +44,7 @@ function Link( siteConfig: {trailingSlash, baseUrl}, } = useDocusaurusContext(); const {withBaseUrl} = useBaseUrlUtils(); - const linksCollector = useLinksCollector(); + const brokenLinks = useBrokenLinks(); const innerRef = useRef(null); useImperativeHandle(forwardedRef, () => innerRef.current!); @@ -144,7 +144,7 @@ function Link( const isRegularHtmlLink = !targetLink || !isInternal || isAnchorLink; if (!isRegularHtmlLink && !noBrokenLinkCheck) { - linksCollector.collectLink(targetLink!); + brokenLinks.collectLink(targetLink!); } return isRegularHtmlLink ? ( diff --git a/packages/docusaurus/src/client/exports/useBrokenLinks.ts b/packages/docusaurus/src/client/exports/useBrokenLinks.ts new file mode 100644 index 0000000000..979aa399cd --- /dev/null +++ b/packages/docusaurus/src/client/exports/useBrokenLinks.ts @@ -0,0 +1,13 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import {useBrokenLinksContext} from '../BrokenLinksContext'; +import type {BrokenLinks} from '@docusaurus/useBrokenLinks'; + +export default function useBrokenLinks(): BrokenLinks { + return useBrokenLinksContext(); +} diff --git a/packages/docusaurus/src/client/serverEntry.tsx b/packages/docusaurus/src/client/serverEntry.tsx index 2d67558926..c01c4779e9 100644 --- a/packages/docusaurus/src/client/serverEntry.tsx +++ b/packages/docusaurus/src/client/serverEntry.tsx @@ -20,9 +20,9 @@ import {renderStaticApp} from './serverRenderer'; import preload from './preload'; import App from './App'; import { - createStatefulLinksCollector, - LinksCollectorProvider, -} from './LinksCollector'; + createStatefulBrokenLinks, + BrokenLinksProvider, +} from './BrokenLinksContext'; import type {Locals} from '@slorber/static-site-generator-webpack-plugin'; const getCompiledSSRTemplate = _.memoize((template: string) => @@ -96,23 +96,27 @@ async function doRender(locals: Locals & {path: string}) { const routerContext = {}; const helmetContext = {}; - const linksCollector = createStatefulLinksCollector(); + const statefulBrokenLinks = createStatefulBrokenLinks(); const app = ( // @ts-expect-error: we are migrating away from react-loadable anyways modules.add(moduleName)}> ); const appHtml = await renderStaticApp(app); - onLinksCollected(location, linksCollector.getCollectedLinks()); + onLinksCollected({ + staticPagePath: location, + anchors: statefulBrokenLinks.getCollectedAnchors(), + links: statefulBrokenLinks.getCollectedLinks(), + }); const {helmet} = helmetContext as FilledContext; const htmlAttributes = helmet.htmlAttributes.toString(); diff --git a/packages/docusaurus/src/commands/build.ts b/packages/docusaurus/src/commands/build.ts index 3b20912150..c0a3816409 100644 --- a/packages/docusaurus/src/commands/build.ts +++ b/packages/docusaurus/src/commands/build.ts @@ -152,8 +152,8 @@ async function buildLocale({ generatedFilesDir, plugins, siteConfig: { - baseUrl, onBrokenLinks, + onBrokenAnchors, staticDirectories: staticDirectoriesOption, }, routes, @@ -180,13 +180,15 @@ async function buildLocale({ }, ); - const allCollectedLinks: {[location: string]: string[]} = {}; + const collectedLinks: { + [pathname: string]: {links: string[]; anchors: string[]}; + } = {}; const headTags: {[location: string]: HelmetServerState} = {}; let serverConfig: Configuration = await createServerConfig({ props, - onLinksCollected: (staticPagePath, links) => { - allCollectedLinks[staticPagePath] = links; + onLinksCollected: ({staticPagePath, links, anchors}) => { + collectedLinks[staticPagePath] = {links, anchors}; }, onHeadTagsCollected: (staticPagePath, tags) => { headTags[staticPagePath] = tags; @@ -288,11 +290,10 @@ async function buildLocale({ ); await handleBrokenLinks({ - allCollectedLinks, + collectedLinks, routes, onBrokenLinks, - outDir, - baseUrl, + onBrokenAnchors, }); logger.success`Generated static files in path=${path.relative( diff --git a/packages/docusaurus/src/deps.d.ts b/packages/docusaurus/src/deps.d.ts index 49bca18d06..199f399009 100644 --- a/packages/docusaurus/src/deps.d.ts +++ b/packages/docusaurus/src/deps.d.ts @@ -42,7 +42,11 @@ declare module '@slorber/static-site-generator-webpack-plugin' { headTags: string; preBodyTags: string; postBodyTags: string; - onLinksCollected: (staticPagePath: string, links: string[]) => void; + onLinksCollected: (params: { + staticPagePath: string; + links: string[]; + anchors: string[]; + }) => void; onHeadTagsCollected: ( staticPagePath: string, tags: 
HelmetServerState, diff --git a/packages/docusaurus/src/server/__tests__/__snapshots__/brokenLinks.test.ts.snap b/packages/docusaurus/src/server/__tests__/__snapshots__/brokenLinks.test.ts.snap deleted file mode 100644 index 8aa3a3e837..0000000000 --- a/packages/docusaurus/src/server/__tests__/__snapshots__/brokenLinks.test.ts.snap +++ /dev/null @@ -1,86 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`handleBrokenLinks reports all broken links 1`] = ` -"Docusaurus found broken links! - -Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist. -Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass. - -Exhaustive list of all broken links found: - -- On source page path = /docs/good doc with space: - -> linking to ./some%20other%20non-existent%20doc1 (resolved as: /docs/some%20other%20non-existent%20doc1) - -> linking to ./break%2F..%2F..%2Fout2 (resolved as: /docs/break%2F..%2F..%2Fout2) - -- On source page path = /docs/goodDoc: - -> linking to ../anotherGoodDoc#reported-because-of-bad-relative-path1 (resolved as: /anotherGoodDoc) - -> linking to ./docThatDoesNotExist2 (resolved as: /docs/docThatDoesNotExist2) - -> linking to ./badRelativeLink3 (resolved as: /docs/badRelativeLink3) - -> linking to ../badRelativeLink4 (resolved as: /badRelativeLink4) - -- On source page path = /community: - -> linking to /someNonExistentDoc1 - -> linking to /badLink2 - -> linking to ./badLink3 (resolved as: /badLink3) - -- On source page path = /page1: - -> linking to /link1 - -> linking to /emptyFolder - -- On source page path = /page2: - -> linking to /docs/link2 - -> linking to /emptyFolder/ - -> linking to /hey/link3 -" -`; - -exports[`handleBrokenLinks reports frequent broken links 1`] = ` -"Docusaurus found broken links! - -Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist. -Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass. - -It looks like some of the broken links we found appear in many pages of your site. -Maybe those broken links appear on all pages through your site layout? -We recommend that you check your theme configuration for such links (particularly, theme navbar and footer). 
-Frequent broken links are linking to: -- /frequent -- ./maybe-not - -Exhaustive list of all broken links found: - -- On source page path = /docs/good doc with space: - -> linking to ./some%20other%20non-existent%20doc1 (resolved as: /docs/some%20other%20non-existent%20doc1) - -> linking to ./break%2F..%2F..%2Fout2 (resolved as: /docs/break%2F..%2F..%2Fout2) - -> linking to /frequent - -> linking to ./maybe-not (resolved as: /docs/maybe-not) - -- On source page path = /docs/goodDoc: - -> linking to ../anotherGoodDoc#reported-because-of-bad-relative-path1 (resolved as: /anotherGoodDoc) - -> linking to ./docThatDoesNotExist2 (resolved as: /docs/docThatDoesNotExist2) - -> linking to ./badRelativeLink3 (resolved as: /docs/badRelativeLink3) - -> linking to ../badRelativeLink4 (resolved as: /badRelativeLink4) - -> linking to /frequent - -> linking to ./maybe-not (resolved as: /docs/maybe-not) - -- On source page path = /community: - -> linking to /someNonExistentDoc1 - -> linking to /badLink2 - -> linking to ./badLink3 (resolved as: /badLink3) - -> linking to /frequent - -> linking to ./maybe-not (resolved as: /maybe-not) - -- On source page path = /page1: - -> linking to /link1 - -> linking to /emptyFolder - -> linking to /frequent - -> linking to ./maybe-not (resolved as: /maybe-not) - -- On source page path = /page2: - -> linking to /docs/link2 - -> linking to /emptyFolder/ - -> linking to /hey/link3 - -> linking to /frequent - -> linking to ./maybe-not (resolved as: /maybe-not) -" -`; diff --git a/packages/docusaurus/src/server/__tests__/__snapshots__/config.test.ts.snap b/packages/docusaurus/src/server/__tests__/__snapshots__/config.test.ts.snap index 2ed2b796fd..c10c483390 100644 --- a/packages/docusaurus/src/server/__tests__/__snapshots__/config.test.ts.snap +++ b/packages/docusaurus/src/server/__tests__/__snapshots__/config.test.ts.snap @@ -24,9 +24,12 @@ exports[`loadSiteConfig website with .cjs siteConfig 1`] = ` "headingIds": true, }, "mermaid": false, + "parseFrontMatter": [Function], "preprocessor": undefined, + "remarkRehypeOptions": undefined, }, "noIndex": false, + "onBrokenAnchors": "warn", "onBrokenLinks": "throw", "onBrokenMarkdownLinks": "warn", "onDuplicateRoutes": "warn", @@ -72,9 +75,12 @@ exports[`loadSiteConfig website with ts + js config 1`] = ` "headingIds": true, }, "mermaid": false, + "parseFrontMatter": [Function], "preprocessor": undefined, + "remarkRehypeOptions": undefined, }, "noIndex": false, + "onBrokenAnchors": "warn", "onBrokenLinks": "throw", "onBrokenMarkdownLinks": "warn", "onDuplicateRoutes": "warn", @@ -120,9 +126,12 @@ exports[`loadSiteConfig website with valid JS CJS config 1`] = ` "headingIds": true, }, "mermaid": false, + "parseFrontMatter": [Function], "preprocessor": undefined, + "remarkRehypeOptions": undefined, }, "noIndex": false, + "onBrokenAnchors": "warn", "onBrokenLinks": "throw", "onBrokenMarkdownLinks": "warn", "onDuplicateRoutes": "warn", @@ -168,9 +177,12 @@ exports[`loadSiteConfig website with valid JS ESM config 1`] = ` "headingIds": true, }, "mermaid": false, + "parseFrontMatter": [Function], "preprocessor": undefined, + "remarkRehypeOptions": undefined, }, "noIndex": false, + "onBrokenAnchors": "warn", "onBrokenLinks": "throw", "onBrokenMarkdownLinks": "warn", "onDuplicateRoutes": "warn", @@ -216,9 +228,12 @@ exports[`loadSiteConfig website with valid TypeScript CJS config 1`] = ` "headingIds": true, }, "mermaid": false, + "parseFrontMatter": [Function], "preprocessor": undefined, + "remarkRehypeOptions": undefined, }, "noIndex": 
false, + "onBrokenAnchors": "warn", "onBrokenLinks": "throw", "onBrokenMarkdownLinks": "warn", "onDuplicateRoutes": "warn", @@ -264,9 +279,12 @@ exports[`loadSiteConfig website with valid TypeScript ESM config 1`] = ` "headingIds": true, }, "mermaid": false, + "parseFrontMatter": [Function], "preprocessor": undefined, + "remarkRehypeOptions": undefined, }, "noIndex": false, + "onBrokenAnchors": "warn", "onBrokenLinks": "throw", "onBrokenMarkdownLinks": "warn", "onDuplicateRoutes": "warn", @@ -312,9 +330,12 @@ exports[`loadSiteConfig website with valid async config 1`] = ` "headingIds": true, }, "mermaid": false, + "parseFrontMatter": [Function], "preprocessor": undefined, + "remarkRehypeOptions": undefined, }, "noIndex": false, + "onBrokenAnchors": "warn", "onBrokenLinks": "throw", "onBrokenMarkdownLinks": "warn", "onDuplicateRoutes": "warn", @@ -362,9 +383,12 @@ exports[`loadSiteConfig website with valid async config creator function 1`] = ` "headingIds": true, }, "mermaid": false, + "parseFrontMatter": [Function], "preprocessor": undefined, + "remarkRehypeOptions": undefined, }, "noIndex": false, + "onBrokenAnchors": "warn", "onBrokenLinks": "throw", "onBrokenMarkdownLinks": "warn", "onDuplicateRoutes": "warn", @@ -412,9 +436,12 @@ exports[`loadSiteConfig website with valid config creator function 1`] = ` "headingIds": true, }, "mermaid": false, + "parseFrontMatter": [Function], "preprocessor": undefined, + "remarkRehypeOptions": undefined, }, "noIndex": false, + "onBrokenAnchors": "warn", "onBrokenLinks": "throw", "onBrokenMarkdownLinks": "warn", "onDuplicateRoutes": "warn", @@ -465,9 +492,12 @@ exports[`loadSiteConfig website with valid siteConfig 1`] = ` "headingIds": true, }, "mermaid": false, + "parseFrontMatter": [Function], "preprocessor": undefined, + "remarkRehypeOptions": undefined, }, "noIndex": false, + "onBrokenAnchors": "warn", "onBrokenLinks": "throw", "onBrokenMarkdownLinks": "warn", "onDuplicateRoutes": "warn", diff --git a/packages/docusaurus/src/server/__tests__/__snapshots__/index.test.ts.snap b/packages/docusaurus/src/server/__tests__/__snapshots__/index.test.ts.snap index 45b94b8694..cba9622267 100644 --- a/packages/docusaurus/src/server/__tests__/__snapshots__/index.test.ts.snap +++ b/packages/docusaurus/src/server/__tests__/__snapshots__/index.test.ts.snap @@ -98,9 +98,12 @@ exports[`load loads props for site with custom i18n path 1`] = ` "headingIds": true, }, "mermaid": false, + "parseFrontMatter": [Function], "preprocessor": undefined, + "remarkRehypeOptions": undefined, }, "noIndex": false, + "onBrokenAnchors": "warn", "onBrokenLinks": "throw", "onBrokenMarkdownLinks": "warn", "onDuplicateRoutes": "warn", diff --git a/packages/docusaurus/src/server/__tests__/brokenLinks.test.ts b/packages/docusaurus/src/server/__tests__/brokenLinks.test.ts index 40e76ed45b..158af9165a 100644 --- a/packages/docusaurus/src/server/__tests__/brokenLinks.test.ts +++ b/packages/docusaurus/src/server/__tests__/brokenLinks.test.ts @@ -6,190 +6,608 @@ */ import {jest} from '@jest/globals'; -import path from 'path'; -import _ from 'lodash'; import {handleBrokenLinks} from '../brokenLinks'; import type {RouteConfig} from '@docusaurus/types'; +type Params = Parameters- + +- [0]; + +// We don't need all the routes attributes for our tests +type SimpleRoute = {path: string; routes?: SimpleRoute[]}; + +// Conveniently apply defaults to function under test +async function testBrokenLinks(params: { + collectedLinks?: Params['collectedLinks']; + onBrokenLinks?: Params['onBrokenLinks']; + 
onBrokenAnchors?: Params['onBrokenAnchors']; + routes?: SimpleRoute[]; +}) { + await handleBrokenLinks({ + collectedLinks: {}, + onBrokenLinks: 'throw', + onBrokenAnchors: 'throw', + ...params, + // Unsafe but convenient for tests + routes: (params.routes ?? []) as RouteConfig[], + }); +} + describe('handleBrokenLinks', () => { - const routes: RouteConfig[] = [ - { - path: '/community', - component: '', - }, - { - path: '/docs', - component: '', + it('accepts valid link', async () => { + await testBrokenLinks({ + routes: [{path: '/page1'}, {path: '/page2'}], + collectedLinks: { + '/page1': {links: ['/page2'], anchors: []}, + '/page2': {links: [], anchors: []}, + }, + }); + }); + + it('accepts valid link to uncollected page', async () => { + await testBrokenLinks({ + routes: [{path: '/page1'}, {path: '/page2'}], + collectedLinks: { + '/page1': {links: ['/page2'], anchors: []}, + // /page2 is absent on purpose: it doesn't contain any link/anchor + }, + }); + }); + + it('accepts valid link to nested route', async () => { + await testBrokenLinks({ routes: [ - {path: '/docs/goodDoc', component: ''}, - {path: '/docs/anotherGoodDoc', component: ''}, - {path: '/docs/good doc with space', component: ''}, - {path: '/docs/another good doc with space', component: ''}, - {path: '/docs/weird%20but%20good', component: ''}, + {path: '/page1'}, + {path: '/nested/', routes: [{path: '/nested/page2'}]}, ], - }, - { - path: '*', - component: '', - }, - ]; - - const link1 = '/link1'; - const link2 = '/docs/link2'; - const link3 = '/hey/link3'; - - const linkToJavadoc1 = '/javadoc'; - const linkToJavadoc2 = '/javadoc/'; - const linkToJavadoc3 = '/javadoc/index.html'; - const linkToJavadoc4 = '/javadoc/index.html#foo'; - - const linkToZipFile = '/files/file.zip'; - const linkToHtmlFile1 = '/files/hey.html'; - const linkToHtmlFile2 = '/files/hey'; - - const linkToEmptyFolder1 = '/emptyFolder'; - const linkToEmptyFolder2 = '/emptyFolder/'; - const allCollectedLinks = { - '/docs/good doc with space': [ - // Good - valid file with spaces in name - './another%20good%20doc%20with%20space', - // Good - valid file with percent-20 in its name - './weird%20but%20good', - // Bad - non-existent file with spaces in name - './some%20other%20non-existent%20doc1', - // Evil - trying to use ../../ but '/' won't get decoded - // cSpell:ignore Fout - './break%2F..%2F..%2Fout2', - ], - '/docs/goodDoc': [ - // Good links - './anotherGoodDoc#someHash', - '/docs/anotherGoodDoc?someQueryString=true#someHash', - '../docs/anotherGoodDoc?someQueryString=true', - '../docs/anotherGoodDoc#someHash', - // Bad links - '../anotherGoodDoc#reported-because-of-bad-relative-path1', - './docThatDoesNotExist2', - './badRelativeLink3', - '../badRelativeLink4', - ], - '/community': [ - // Good links - '/docs/goodDoc', - '/docs/anotherGoodDoc#someHash', - './docs/goodDoc#someHash', - './docs/anotherGoodDoc', - // Bad links - '/someNonExistentDoc1', - '/badLink2', - './badLink3', - ], - '/page1': [ - link1, - linkToHtmlFile1, - linkToJavadoc1, - linkToHtmlFile2, - linkToJavadoc3, - linkToJavadoc4, - linkToEmptyFolder1, // Not filtered! - ], - '/page2': [ - link2, - linkToEmptyFolder2, // Not filtered! 
- linkToJavadoc2, - link3, - linkToJavadoc3, - linkToZipFile, - ], - }; - - const outDir = path.resolve(__dirname, '__fixtures__/brokenLinks/outDir'); - - it('do not report anything for correct paths', async () => { - const consoleMock = jest - .spyOn(console, 'warn') - .mockImplementation(() => {}); - const allCollectedCorrectLinks = { - '/docs/good doc with space': [ - './another%20good%20doc%20with%20space', - './weird%20but%20good', - ], - '/docs/goodDoc': [ - './anotherGoodDoc#someHash', - '/docs/anotherGoodDoc?someQueryString=true#someHash', - '../docs/anotherGoodDoc?someQueryString=true', - '../docs/anotherGoodDoc#someHash', - ], - '/community': [ - '/docs/goodDoc', - '/docs/anotherGoodDoc#someHash', - './docs/goodDoc#someHash', - './docs/anotherGoodDoc', - ], - '/page1': [ - linkToHtmlFile1, - linkToJavadoc1, - linkToHtmlFile2, - linkToJavadoc3, - linkToJavadoc4, - ], - }; - await handleBrokenLinks({ - allCollectedLinks: allCollectedCorrectLinks, - onBrokenLinks: 'warn', - routes, - baseUrl: '/', - outDir, + collectedLinks: { + '/page1': {links: ['/nested/page2'], anchors: []}, + }, }); - expect(consoleMock).toHaveBeenCalledTimes(0); }); - it('reports all broken links', async () => { + it('accepts valid relative link', async () => { + await testBrokenLinks({ + routes: [{path: '/dir/page1'}, {path: '/dir/page2'}], + collectedLinks: { + '/dir/page1': { + links: ['./page2', '../dir/page2', '/dir/page2'], + anchors: [], + }, + }, + }); + }); + + it('accepts valid link with anchor', async () => { + await testBrokenLinks({ + routes: [{path: '/page1'}, {path: '/page2'}], + collectedLinks: { + '/page1': {links: ['/page2#page2anchor'], anchors: []}, + '/page2': {links: [], anchors: ['page2anchor']}, + }, + }); + }); + + it('accepts valid link with querystring + anchor', async () => { + await testBrokenLinks({ + routes: [{path: '/page1'}, {path: '/page2'}], + collectedLinks: { + '/page1': { + links: ['/page2?age=42&theme=dark#page2anchor'], + anchors: [], + }, + '/page2': {links: [], anchors: ['page2anchor']}, + }, + }); + }); + + it('accepts valid link to self', async () => { + await testBrokenLinks({ + routes: [{path: '/page1'}], + collectedLinks: { + '/page1': { + links: [ + '/page1', + './page1', + '', + '/page1#anchor1', + '#anchor1', + '/page1?age=42#anchor1', + '?age=42#anchor1', + ], + anchors: ['anchor1'], + }, + }, + }); + }); + + it('accepts valid link with spaces and encoding', async () => { + await testBrokenLinks({ + routes: [{path: '/page 1'}, {path: '/page 2'}], + collectedLinks: { + '/page 1': { + links: [ + '/page 1', + '/page%201', + '/page%201?age=42', + '/page 2', + '/page%202', + '/page%202?age=42', + '/page%202?age=42#page2anchor', + ], + anchors: [], + }, + '/page 2': {links: [], anchors: ['page2anchor']}, + }, + }); + }); + + it('rejects broken link', async () => { await expect(() => - handleBrokenLinks({ - allCollectedLinks, - onBrokenLinks: 'throw', - routes, - baseUrl: '/', - outDir, + testBrokenLinks({ + routes: [{path: '/page1'}, {path: '/page2'}], + collectedLinks: { + '/page1': {links: ['/brokenLink'], anchors: []}, + }, }), - ).rejects.toThrowErrorMatchingSnapshot(); + ).rejects.toThrowErrorMatchingInlineSnapshot(` + "Docusaurus found broken links! + + Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist. + Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass. 
+ + Exhaustive list of all broken links found: + - Broken link on source page path = /page1: + -> linking to /brokenLink + " + `); }); - it('no-op for ignore', async () => { - // In any case, _.mapValues will always be called, unless handleBrokenLinks - // has already bailed - const lodashMock = jest.spyOn(_, 'mapValues'); - await handleBrokenLinks({ - allCollectedLinks, + it('rejects broken link with anchor', async () => { + await expect(() => + testBrokenLinks({ + routes: [{path: '/page1'}, {path: '/page2'}], + collectedLinks: { + '/page1': {links: ['/brokenLink#anchor'], anchors: []}, + }, + }), + ).rejects.toThrowErrorMatchingInlineSnapshot(` + "Docusaurus found broken links! + + Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist. + Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass. + + Exhaustive list of all broken links found: + - Broken link on source page path = /page1: + -> linking to /brokenLink#anchor + " + `); + }); + + it('rejects broken link with querystring + anchor', async () => { + await expect(() => + testBrokenLinks({ + routes: [{path: '/page1'}, {path: '/page2'}], + collectedLinks: { + '/page1': {links: ['/brokenLink?age=42#anchor'], anchors: []}, + }, + }), + ).rejects.toThrowErrorMatchingInlineSnapshot(` + "Docusaurus found broken links! + + Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist. + Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass. + + Exhaustive list of all broken links found: + - Broken link on source page path = /page1: + -> linking to /brokenLink?age=42#anchor + " + `); + }); + + it('rejects valid link with broken anchor', async () => { + await expect(() => + testBrokenLinks({ + routes: [{path: '/page1'}, {path: '/page2'}], + collectedLinks: { + '/page1': {links: ['/page2#brokenAnchor'], anchors: []}, + '/page2': {links: [], anchors: []}, + }, + }), + ).rejects.toThrowErrorMatchingInlineSnapshot(` + "Docusaurus found broken anchors! + + Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist. + Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass. + + Exhaustive list of all broken anchors found: + - Broken anchor on source page path = /page1: + -> linking to /page2#brokenAnchor + " + `); + }); + + it('rejects valid link with empty broken anchor', async () => { + await expect(() => + testBrokenLinks({ + routes: [{path: '/page1'}, {path: '/page2'}], + collectedLinks: { + '/page1': {links: ['/page2#'], anchors: []}, + '/page2': {links: [], anchors: []}, + }, + }), + ).rejects.toThrowErrorMatchingInlineSnapshot(` + "Docusaurus found broken anchors! + + Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist. + Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass. 
+ + Exhaustive list of all broken anchors found: + - Broken anchor on source page path = /page1: + -> linking to /page2# + " + `); + }); + + it('rejects valid link with broken anchor + query-string', async () => { + await expect(() => + testBrokenLinks({ + routes: [{path: '/page1'}, {path: '/page2'}], + collectedLinks: { + '/page1': { + links: ['/page2?age=42&theme=dark#brokenAnchor'], + anchors: [], + }, + '/page2': {links: [], anchors: []}, + }, + }), + ).rejects.toThrowErrorMatchingInlineSnapshot(` + "Docusaurus found broken anchors! + + Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist. + Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass. + + Exhaustive list of all broken anchors found: + - Broken anchor on source page path = /page1: + -> linking to /page2?age=42&theme=dark#brokenAnchor + " + `); + }); + + it('rejects valid link with broken anchor to self', async () => { + await expect(() => + testBrokenLinks({ + routes: [{path: '/page1'}], + collectedLinks: { + '/page1': { + links: [ + '/page1', + '', + '#goodAnchor', + '/page1#goodAnchor', + '/page1?age=42#goodAnchor', + '#badAnchor1', + '/page1#badAnchor2', + '/page1?age=42#badAnchor3', + ], + + anchors: ['goodAnchor'], + }, + }, + }), + ).rejects.toThrowErrorMatchingInlineSnapshot(` + "Docusaurus found broken anchors! + + Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist. + Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass. + + Exhaustive list of all broken anchors found: + - Broken anchor on source page path = /page1: + -> linking to #badAnchor1 (resolved as: /page1#badAnchor1) + -> linking to /page1#badAnchor2 + -> linking to /page1?age=42#badAnchor3 + " + `); + }); + + it('rejects valid link with broken anchor to uncollected page', async () => { + await expect(() => + testBrokenLinks({ + routes: [{path: '/page1'}, {path: '/page2'}], + collectedLinks: { + '/page1': {links: ['/page2#brokenAnchor'], anchors: []}, + // /page2 is absent on purpose: it doesn't contain any link/anchor + }, + }), + ).rejects.toThrowErrorMatchingInlineSnapshot(` + "Docusaurus found broken anchors! + + Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist. + Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass. + + Exhaustive list of all broken anchors found: + - Broken anchor on source page path = /page1: + -> linking to /page2#brokenAnchor + " + `); + }); + + it('rejects broken anchor with query-string to uncollected page', async () => { + await expect(() => + testBrokenLinks({ + routes: [{path: '/page1'}, {path: '/page2'}], + collectedLinks: { + '/page1': { + links: ['/page2?age=42&theme=dark#brokenAnchor'], + anchors: [], + }, + // /page2 is absent on purpose: it doesn't contain any link/anchor + }, + }), + ).rejects.toThrowErrorMatchingInlineSnapshot(` + "Docusaurus found broken anchors! + + Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist. + Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass. 
+ + Exhaustive list of all broken anchors found: + - Broken anchor on source page path = /page1: + -> linking to /page2?age=42&theme=dark#brokenAnchor + " + `); + }); + + it('can ignore broken links', async () => { + await testBrokenLinks({ onBrokenLinks: 'ignore', - routes, - baseUrl: '/', - outDir, + routes: [{path: '/page1'}], + collectedLinks: { + '/page1': { + links: ['/page2'], + anchors: [], + }, + }, }); - expect(lodashMock).toHaveBeenCalledTimes(0); - lodashMock.mockRestore(); }); - it('reports frequent broken links', async () => { - Object.values(allCollectedLinks).forEach((links) => - links.push( - '/frequent', - // This is in the gray area of what should be reported. Relative paths - // may be resolved to different slugs on different locations. But if - // this comes from a layout link, it should be reported anyways - './maybe-not', - ), + it('can ignore broken anchors', async () => { + await testBrokenLinks({ + onBrokenAnchors: 'ignore', + routes: [{path: '/page1'}], + collectedLinks: { + '/page1': { + links: ['/page1#brokenAnchor'], + anchors: [], + }, + }, + }); + }); + + it('can ignore broken anchors but report broken link', async () => { + await expect(() => + testBrokenLinks({ + onBrokenAnchors: 'ignore', + routes: [{path: '/page1'}], + collectedLinks: { + '/page1': { + links: ['/page1#brokenAnchor', '/page2'], + anchors: [], + }, + }, + }), + ).rejects.toThrowErrorMatchingInlineSnapshot(` + "Docusaurus found broken links! + + Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist. + Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass. + + Exhaustive list of all broken links found: + - Broken link on source page path = /page1: + -> linking to /page2 + " + `); + }); + + it('can ignore broken link but report broken anchors', async () => { + await expect(() => + testBrokenLinks({ + onBrokenLinks: 'ignore', + routes: [{path: '/page1'}], + collectedLinks: { + '/page1': { + links: [ + '/page2', + '/page1#brokenAnchor1', + '/page1#brokenAnchor2', + '#brokenAnchor3', + ], + + anchors: [], + }, + }, + }), + ).rejects.toThrowErrorMatchingInlineSnapshot(` + "Docusaurus found broken anchors! + + Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist. + Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass. + + Exhaustive list of all broken anchors found: + - Broken anchor on source page path = /page1: + -> linking to /page1#brokenAnchor1 + -> linking to /page1#brokenAnchor2 + -> linking to #brokenAnchor3 (resolved as: /page1#brokenAnchor3) + " + `); + }); + + it('can warn for broken links', async () => { + const warnMock = jest.spyOn(console, 'warn'); + + await testBrokenLinks({ + onBrokenLinks: 'warn', + routes: [{path: '/page1'}], + collectedLinks: { + '/page1': { + links: ['/page2'], + anchors: [], + }, + }, + }); + + expect(warnMock).toHaveBeenCalledTimes(1); + expect(warnMock.mock.calls).toMatchInlineSnapshot(` + [ + [ + "[WARNING] Docusaurus found broken links! + + Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist. + Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass. 
+ + Exhaustive list of all broken links found: + - Broken link on source page path = /page1: + -> linking to /page2 + ", + ], + ] + `); + warnMock.mockRestore(); + }); + + it('can warn for broken anchors', async () => { + const warnMock = jest.spyOn(console, 'warn'); + + await testBrokenLinks({ + onBrokenAnchors: 'warn', + routes: [{path: '/page1'}], + collectedLinks: { + '/page1': { + links: ['/page1#brokenAnchor'], + anchors: [], + }, + }, + }); + + expect(warnMock).toHaveBeenCalledTimes(1); + expect(warnMock.mock.calls).toMatchInlineSnapshot(` + [ + [ + "[WARNING] Docusaurus found broken anchors! + + Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist. + Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass. + + Exhaustive list of all broken anchors found: + - Broken anchor on source page path = /page1: + -> linking to /page1#brokenAnchor + ", + ], + ] + `); + warnMock.mockRestore(); + }); + + it('can warn for both broken links and anchors', async () => { + const warnMock = jest.spyOn(console, 'warn'); + + await testBrokenLinks({ + onBrokenLinks: 'warn', + onBrokenAnchors: 'warn', + routes: [{path: '/page1'}], + collectedLinks: { + '/page1': { + links: ['/page1#brokenAnchor', '/page2'], + anchors: [], + }, + }, + }); + + expect(warnMock).toHaveBeenCalledTimes(2); + expect(warnMock.mock.calls).toMatchInlineSnapshot(` + [ + [ + "[WARNING] Docusaurus found broken links! + + Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist. + Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass. + + Exhaustive list of all broken links found: + - Broken link on source page path = /page1: + -> linking to /page2 + ", + ], + [ + "[WARNING] Docusaurus found broken anchors! + + Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist. + Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass. + + Exhaustive list of all broken anchors found: + - Broken anchor on source page path = /page1: + -> linking to /page1#brokenAnchor + ", + ], + ] + `); + warnMock.mockRestore(); + }); + + it('reports frequent broken links differently', async () => { + const pagePaths = [ + '/page1', + '/page2', + '/dir/page3', + '/dir/page4', + '/dir/page5', + ]; + + const routes: SimpleRoute[] = pagePaths.map((pagePath) => ({ + path: pagePath, + })); + + const collectedLinks: Params['collectedLinks'] = Object.fromEntries( + pagePaths.map((pagePath) => [ + pagePath, + { + links: ['/frequentBrokenLink', './relativeFrequentBrokenLink'], + anchors: [], + }, + ]), ); await expect(() => - handleBrokenLinks({ - allCollectedLinks, - onBrokenLinks: 'throw', + testBrokenLinks({ routes, - baseUrl: '/', - outDir, + collectedLinks, }), - ).rejects.toThrowErrorMatchingSnapshot(); + ).rejects.toThrowErrorMatchingInlineSnapshot(` + "Docusaurus found broken links! + + Please check the pages of your site in the list below, and make sure you don't reference any path that does not exist. + Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass. + + It looks like some of the broken links we found appear in many pages of your site. + Maybe those broken links appear on all pages through your site layout? 
+ We recommend that you check your theme configuration for such links (particularly, theme navbar and footer). + Frequent broken links are linking to: + - /frequentBrokenLink + - ./relativeFrequentBrokenLink + + Exhaustive list of all broken links found: + - Broken link on source page path = /page1: + -> linking to /frequentBrokenLink + -> linking to ./relativeFrequentBrokenLink (resolved as: /relativeFrequentBrokenLink) + - Broken link on source page path = /page2: + -> linking to /frequentBrokenLink + -> linking to ./relativeFrequentBrokenLink (resolved as: /relativeFrequentBrokenLink) + - Broken link on source page path = /dir/page3: + -> linking to /frequentBrokenLink + -> linking to ./relativeFrequentBrokenLink (resolved as: /dir/relativeFrequentBrokenLink) + - Broken link on source page path = /dir/page4: + -> linking to /frequentBrokenLink + -> linking to ./relativeFrequentBrokenLink (resolved as: /dir/relativeFrequentBrokenLink) + - Broken link on source page path = /dir/page5: + -> linking to /frequentBrokenLink + -> linking to ./relativeFrequentBrokenLink (resolved as: /dir/relativeFrequentBrokenLink) + " + `); }); }); diff --git a/packages/docusaurus/src/server/__tests__/configValidation.test.ts b/packages/docusaurus/src/server/__tests__/configValidation.test.ts index b3bc7b2611..ea76af8140 100644 --- a/packages/docusaurus/src/server/__tests__/configValidation.test.ts +++ b/packages/docusaurus/src/server/__tests__/configValidation.test.ts @@ -61,12 +61,17 @@ describe('normalizeConfig', () => { markdown: { format: 'md', mermaid: true, + parseFrontMatter: async (params) => + params.defaultParseFrontMatter(params), preprocessor: ({fileContent}) => fileContent, mdx1Compat: { comments: true, admonitions: false, headingIds: true, }, + remarkRehypeOptions: { + footnoteLabel: 'Pied de page', + }, }, }; const normalizedConfig = normalizeConfig(userConfig); @@ -504,12 +509,19 @@ describe('markdown', () => { const markdown: DocusaurusConfig['markdown'] = { format: 'md', mermaid: true, + parseFrontMatter: async (params) => + params.defaultParseFrontMatter(params), preprocessor: ({fileContent}) => fileContent, mdx1Compat: { comments: false, admonitions: true, headingIds: false, }, + remarkRehypeOptions: { + footnoteLabel: 'Notes de bas de page', + // @ts-expect-error: we don't validate it on purpose + anyKey: 'heck we accept it on purpose', + }, }; expect( normalizeConfig({ diff --git a/packages/docusaurus/src/server/brokenLinks.ts b/packages/docusaurus/src/server/brokenLinks.ts index f443a4659c..ccbaadcd3f 100644 --- a/packages/docusaurus/src/server/brokenLinks.ts +++ b/packages/docusaurus/src/server/brokenLinks.ts @@ -5,45 +5,42 @@ * LICENSE file in the root directory of this source tree. */ -import fs from 'fs-extra'; -import path from 'path'; import _ from 'lodash'; import logger from '@docusaurus/logger'; -import combinePromises from 'combine-promises'; import {matchRoutes} from 'react-router-config'; -import {removePrefix, removeSuffix, resolvePathname} from '@docusaurus/utils'; +import {parseURLPath, serializeURLPath, type URLPath} from '@docusaurus/utils'; import {getAllFinalRoutes} from './utils'; import type {RouteConfig, ReportingSeverity} from '@docusaurus/types'; type BrokenLink = { link: string; resolvedLink: string; + anchor: boolean; }; -// matchRoutes does not support qs/anchors, so we remove it! 
-function onlyPathname(link: string) { - return link.split('#')[0]!.split('?')[0]!; -} +type BrokenLinksMap = {[pathname: string]: BrokenLink[]}; -function getPageBrokenLinks({ +// The linking data that has been collected on Docusaurus pages during SSG +// {rendered page pathname => links and anchors collected on that page} +type CollectedLinks = { + [pathname: string]: {links: string[]; anchors: string[]}; +}; + +function getBrokenLinksForPage({ + collectedLinks, pagePath, pageLinks, routes, }: { + collectedLinks: CollectedLinks; pagePath: string; pageLinks: string[]; + pageAnchors: string[]; routes: RouteConfig[]; }): BrokenLink[] { - // ReactRouter is able to support links like ./../somePath but `matchRoutes` - // does not do this resolution internally. We must resolve the links before - // using `matchRoutes`. `resolvePathname` is used internally by React Router - function resolveLink(link: string) { - const resolvedLink = resolvePathname(onlyPathname(link), pagePath); - return {link, resolvedLink}; - } - - function isBrokenLink(link: string) { - const matchedRoutes = [link, decodeURI(link)] + // console.log('routes:', routes); + function isPathBrokenLink(linkPath: URLPath) { + const matchedRoutes = [linkPath.pathname, decodeURI(linkPath.pathname)] // @ts-expect-error: React router types RouteConfig with an actual React // component, but we load route components with string paths. // We don't actually access component here, so it's fine. @@ -52,7 +49,52 @@ function getPageBrokenLinks({ return matchedRoutes.length === 0; } - return pageLinks.map(resolveLink).filter((l) => isBrokenLink(l.resolvedLink)); + function isAnchorBrokenLink(linkPath: URLPath) { + const {pathname, hash} = linkPath; + + // Link has no hash: it can't be a broken anchor link + if (hash === undefined) { + return false; + } + + const targetPage = + collectedLinks[pathname] || collectedLinks[decodeURI(pathname)]; + + // link with anchor to a page that does not exist (or did not collect any + // link/anchor) is considered as a broken anchor + if (!targetPage) { + return true; + } + + // it's a broken anchor if the target page exists + // but the anchor does not exist on that page + return !targetPage.anchors.includes(hash); + } + + const brokenLinks = pageLinks.flatMap((link) => { + const linkPath = parseURLPath(link, pagePath); + if (isPathBrokenLink(linkPath)) { + return [ + { + link, + resolvedLink: serializeURLPath(linkPath), + anchor: false, + }, + ]; + } + if (isAnchorBrokenLink(linkPath)) { + return [ + { + link, + resolvedLink: serializeURLPath(linkPath), + anchor: true, + }, + ]; + } + return []; + }); + + return brokenLinks; } /** @@ -66,45 +108,76 @@ function filterIntermediateRoutes(routesInput: RouteConfig[]): RouteConfig[] { return getAllFinalRoutes(routesWithout404); } -function getAllBrokenLinks({ - allCollectedLinks, +function getBrokenLinks({ + collectedLinks, routes, }: { - allCollectedLinks: {[location: string]: string[]}; + collectedLinks: CollectedLinks; routes: RouteConfig[]; -}): {[location: string]: BrokenLink[]} { +}): BrokenLinksMap { const filteredRoutes = filterIntermediateRoutes(routes); - const allBrokenLinks = _.mapValues(allCollectedLinks, (pageLinks, pagePath) => - getPageBrokenLinks({pageLinks, pagePath, routes: filteredRoutes}), + return _.mapValues(collectedLinks, (pageCollectedData, pagePath) => + getBrokenLinksForPage({ + collectedLinks, + pageLinks: pageCollectedData.links, + pageAnchors: pageCollectedData.anchors, + pagePath, + routes: filteredRoutes, + }), ); - - return 
_.pickBy(allBrokenLinks, (brokenLinks) => brokenLinks.length > 0); } -function getBrokenLinksErrorMessage(allBrokenLinks: { - [location: string]: BrokenLink[]; -}): string | undefined { - if (Object.keys(allBrokenLinks).length === 0) { +function brokenLinkMessage(brokenLink: BrokenLink): string { + const showResolvedLink = brokenLink.link !== brokenLink.resolvedLink; + return `${brokenLink.link}${ + showResolvedLink ? ` (resolved as: ${brokenLink.resolvedLink})` : '' + }`; +} + +function createBrokenLinksMessage( + pagePath: string, + brokenLinks: BrokenLink[], +): string { + const type = brokenLinks[0]?.anchor === true ? 'anchor' : 'link'; + + const anchorMessage = + brokenLinks.length > 0 + ? `- Broken ${type} on source page path = ${pagePath}: + -> linking to ${brokenLinks + .map(brokenLinkMessage) + .join('\n -> linking to ')}` + : ''; + + return `${anchorMessage}`; +} + +function createBrokenAnchorsMessage( + brokenAnchors: BrokenLinksMap, +): string | undefined { + if (Object.keys(brokenAnchors).length === 0) { return undefined; } - function brokenLinkMessage(brokenLink: BrokenLink): string { - const showResolvedLink = brokenLink.link !== brokenLink.resolvedLink; - return `${brokenLink.link}${ - showResolvedLink ? ` (resolved as: ${brokenLink.resolvedLink})` : '' - }`; - } + return `Docusaurus found broken anchors! - function pageBrokenLinksMessage( - pagePath: string, - brokenLinks: BrokenLink[], - ): string { - return ` -- On source page path = ${pagePath}: - -> linking to ${brokenLinks - .map(brokenLinkMessage) - .join('\n -> linking to ')}`; +Please check the pages of your site in the list below, and make sure you don't reference any anchor that does not exist. +Note: it's possible to ignore broken anchors with the 'onBrokenAnchors' Docusaurus configuration, and let the build pass. + +Exhaustive list of all broken anchors found: +${Object.entries(brokenAnchors) + .map(([pagePath, brokenLinks]) => + createBrokenLinksMessage(pagePath, brokenLinks), + ) + .join('\n')} +`; +} + +function createBrokenPathsMessage( + brokenPathsMap: BrokenLinksMap, +): string | undefined { + if (Object.keys(brokenPathsMap).length === 0) { + return undefined; } /** @@ -113,7 +186,7 @@ function getBrokenLinksErrorMessage(allBrokenLinks: { * this out. 
See https://github.com/facebook/docusaurus/issues/3567#issuecomment-706973805 */ function getLayoutBrokenLinksHelpMessage() { - const flatList = Object.entries(allBrokenLinks).flatMap( + const flatList = Object.entries(brokenPathsMap).flatMap( ([pagePage, brokenLinks]) => brokenLinks.map((brokenLink) => ({pagePage, brokenLink})), ); @@ -146,102 +219,78 @@ Please check the pages of your site in the list below, and make sure you don't r Note: it's possible to ignore broken links with the 'onBrokenLinks' Docusaurus configuration, and let the build pass.${getLayoutBrokenLinksHelpMessage()} Exhaustive list of all broken links found: -${Object.entries(allBrokenLinks) - .map(([pagePath, brokenLinks]) => - pageBrokenLinksMessage(pagePath, brokenLinks), +${Object.entries(brokenPathsMap) + .map(([pagePath, brokenPaths]) => + createBrokenLinksMessage(pagePath, brokenPaths), ) .join('\n')} `; } -async function isExistingFile(filePath: string) { - try { - return (await fs.stat(filePath)).isFile(); - } catch { - return false; - } -} +function splitBrokenLinks(brokenLinks: BrokenLinksMap): { + brokenPaths: BrokenLinksMap; + brokenAnchors: BrokenLinksMap; +} { + const brokenPaths: BrokenLinksMap = {}; + const brokenAnchors: BrokenLinksMap = {}; -// If a file actually exist on the file system, we know the link is valid -// even if docusaurus does not know about this file, so we don't report it -async function filterExistingFileLinks({ - baseUrl, - outDir, - allCollectedLinks, -}: { - baseUrl: string; - outDir: string; - allCollectedLinks: {[location: string]: string[]}; -}): Promise<{[location: string]: string[]}> { - async function linkFileExists(link: string) { - // /baseUrl/javadoc/ -> /outDir/javadoc - const baseFilePath = onlyPathname( - removeSuffix(`${outDir}/${removePrefix(link, baseUrl)}`, '/'), + Object.entries(brokenLinks).forEach(([pathname, pageBrokenLinks]) => { + const [anchorBrokenLinks, pathBrokenLinks] = _.partition( + pageBrokenLinks, + (link) => link.anchor, ); - // -> /outDir/javadoc - // -> /outDir/javadoc.html - // -> /outDir/javadoc/index.html - const filePathsToTry: string[] = [baseFilePath]; - if (!path.extname(baseFilePath)) { - filePathsToTry.push( - `${baseFilePath}.html`, - path.join(baseFilePath, 'index.html'), - ); + if (pathBrokenLinks.length > 0) { + brokenPaths[pathname] = pathBrokenLinks; } + if (anchorBrokenLinks.length > 0) { + brokenAnchors[pathname] = anchorBrokenLinks; + } + }); - for (const file of filePathsToTry) { - if (await isExistingFile(file)) { - return true; - } - } - return false; + return {brokenPaths, brokenAnchors}; +} + +function reportBrokenLinks({ + brokenLinks, + onBrokenLinks, + onBrokenAnchors, +}: { + brokenLinks: BrokenLinksMap; + onBrokenLinks: ReportingSeverity; + onBrokenAnchors: ReportingSeverity; +}) { + // We need to split the broken links reporting in 2 for better granularity + // This is because we need to report broken path/anchors independently + // For v3.x retro-compatibility, we can't throw by default for broken anchors + // TODO Docusaurus v4: make onBrokenAnchors throw by default? + const {brokenPaths, brokenAnchors} = splitBrokenLinks(brokenLinks); + + const pathErrorMessage = createBrokenPathsMessage(brokenPaths); + if (pathErrorMessage) { + logger.report(onBrokenLinks)(pathErrorMessage); } - return combinePromises( - _.mapValues(allCollectedLinks, async (links) => - ( - await Promise.all( - links.map(async (link) => ((await linkFileExists(link)) ? 
'' : link)), - ) - ).filter(Boolean), - ), - ); + const anchorErrorMessage = createBrokenAnchorsMessage(brokenAnchors); + if (anchorErrorMessage) { + logger.report(onBrokenAnchors)(anchorErrorMessage); + } } export async function handleBrokenLinks({ - allCollectedLinks, + collectedLinks, onBrokenLinks, + onBrokenAnchors, routes, - baseUrl, - outDir, }: { - allCollectedLinks: {[location: string]: string[]}; + collectedLinks: CollectedLinks; onBrokenLinks: ReportingSeverity; + onBrokenAnchors: ReportingSeverity; routes: RouteConfig[]; - baseUrl: string; - outDir: string; }): Promise { - if (onBrokenLinks === 'ignore') { + if (onBrokenLinks === 'ignore' && onBrokenAnchors === 'ignore') { return; } - - // If we link to a file like /myFile.zip, and the file actually exist for the - // file system. It is not a broken link, it may simply be a link to an - // existing static file... - const allCollectedLinksFiltered = await filterExistingFileLinks({ - allCollectedLinks, - baseUrl, - outDir, - }); - - const allBrokenLinks = getAllBrokenLinks({ - allCollectedLinks: allCollectedLinksFiltered, - routes, - }); - - const errorMessage = getBrokenLinksErrorMessage(allBrokenLinks); - if (errorMessage) { - logger.report(onBrokenLinks)(errorMessage); - } + const brokenLinks = getBrokenLinks({routes, collectedLinks}); + reportBrokenLinks({brokenLinks, onBrokenLinks, onBrokenAnchors}); } diff --git a/packages/docusaurus/src/server/configValidation.ts b/packages/docusaurus/src/server/configValidation.ts index 3f9de2ce68..13f554512c 100644 --- a/packages/docusaurus/src/server/configValidation.ts +++ b/packages/docusaurus/src/server/configValidation.ts @@ -6,6 +6,7 @@ */ import { + DEFAULT_PARSE_FRONT_MATTER, DEFAULT_STATIC_DIR_NAME, DEFAULT_I18N_DIR_NAME, addLeadingSlash, @@ -13,7 +14,11 @@ import { removeTrailingSlash, } from '@docusaurus/utils'; import {Joi, printWarning} from '@docusaurus/utils-validation'; -import type {DocusaurusConfig, I18nConfig} from '@docusaurus/types'; +import type { + DocusaurusConfig, + I18nConfig, + MarkdownConfig, +} from '@docusaurus/types'; const DEFAULT_I18N_LOCALE = 'en'; @@ -24,10 +29,24 @@ export const DEFAULT_I18N_CONFIG: I18nConfig = { localeConfigs: {}, }; +export const DEFAULT_MARKDOWN_CONFIG: MarkdownConfig = { + format: 'mdx', // TODO change this to "detect" in Docusaurus v4? + mermaid: false, + preprocessor: undefined, + parseFrontMatter: DEFAULT_PARSE_FRONT_MATTER, + mdx1Compat: { + comments: true, + admonitions: true, + headingIds: true, + }, + remarkRehypeOptions: undefined, +}; + export const DEFAULT_CONFIG: Pick< DocusaurusConfig, | 'i18n' | 'onBrokenLinks' + | 'onBrokenAnchors' | 'onBrokenMarkdownLinks' | 'onDuplicateRoutes' | 'plugins' @@ -48,6 +67,7 @@ export const DEFAULT_CONFIG: Pick< > = { i18n: DEFAULT_I18N_CONFIG, onBrokenLinks: 'throw', + onBrokenAnchors: 'warn', // TODO Docusaurus v4: change to throw onBrokenMarkdownLinks: 'warn', onDuplicateRoutes: 'warn', plugins: [], @@ -64,37 +84,26 @@ export const DEFAULT_CONFIG: Pick< tagline: '', baseUrlIssueBanner: true, staticDirectories: [DEFAULT_STATIC_DIR_NAME], - markdown: { - format: 'mdx', // TODO change this to "detect" in Docusaurus v4? 
- mermaid: false, - preprocessor: undefined, - mdx1Compat: { - comments: true, - admonitions: true, - headingIds: true, - }, - }, + markdown: DEFAULT_MARKDOWN_CONFIG, }; function createPluginSchema(theme: boolean) { - return ( - Joi.alternatives() - .try( - Joi.function(), - Joi.array() - .ordered(Joi.function().required(), Joi.object().required()) - .length(2), - Joi.string(), - Joi.array() - .ordered(Joi.string().required(), Joi.object().required()) - .length(2), - Joi.any().valid(false, null), - ) - // @ts-expect-error: bad lib def, doesn't recognize an array of reports - .error((errors) => { - errors.forEach((error) => { - const validConfigExample = theme - ? `Example valid theme config: + return Joi.alternatives() + .try( + Joi.function(), + Joi.array() + .ordered(Joi.function().required(), Joi.object().required()) + .length(2), + Joi.string(), + Joi.array() + .ordered(Joi.string().required(), Joi.object().required()) + .length(2), + Joi.any().valid(false, null), + ) + .error((errors) => { + errors.forEach((error) => { + const validConfigExample = theme + ? `Example valid theme config: { themes: [ ["@docusaurus/theme-classic",options], @@ -104,7 +113,7 @@ function createPluginSchema(theme: boolean) { [function myTheme() { },options] ], };` - : `Example valid plugin config: + : `Example valid plugin config: { plugins: [ ["@docusaurus/plugin-content-docs",options], @@ -115,17 +124,16 @@ function createPluginSchema(theme: boolean) { ], };`; - error.message = ` => Bad Docusaurus ${ - theme ? 'theme' : 'plugin' - } value ${error.path.reduce((acc, cur) => - typeof cur === 'string' ? `${acc}.${cur}` : `${acc}[${cur}]`, - )}. + error.message = ` => Bad Docusaurus ${ + theme ? 'theme' : 'plugin' + } value ${error.path.reduce((acc, cur) => + typeof cur === 'string' ? `${acc}.${cur}` : `${acc}[${cur}]`, + )}. ${validConfigExample} `; - }); - return errors; - }) - ); + }); + return errors; + }); } const PluginSchema = createPluginSchema(false); @@ -202,6 +210,9 @@ export const ConfigSchema = Joi.object ({ onBrokenLinks: Joi.string() .equal('ignore', 'log', 'warn', 'throw') .default(DEFAULT_CONFIG.onBrokenLinks), + onBrokenAnchors: Joi.string() + .equal('ignore', 'log', 'warn', 'throw') + .default(DEFAULT_CONFIG.onBrokenAnchors), onBrokenMarkdownLinks: Joi.string() .equal('ignore', 'log', 'warn', 'throw') .default(DEFAULT_CONFIG.onBrokenMarkdownLinks), @@ -280,6 +291,9 @@ export const ConfigSchema = Joi.object ({ format: Joi.string() .equal('mdx', 'md', 'detect') .default(DEFAULT_CONFIG.markdown.format), + parseFrontMatter: Joi.function().default( + () => DEFAULT_CONFIG.markdown.parseFrontMatter, + ), mermaid: Joi.boolean().default(DEFAULT_CONFIG.markdown.mermaid), preprocessor: Joi.function() .arity(1) @@ -296,6 +310,11 @@ export const ConfigSchema = Joi.object ({ DEFAULT_CONFIG.markdown.mdx1Compat.headingIds, ), }).default(DEFAULT_CONFIG.markdown.mdx1Compat), + remarkRehypeOptions: + // add proper external options validation? 
+ // Not sure if it's a good idea, validation is likely to become stale + // See https://github.com/remarkjs/remark-rehype#options + Joi.object().unknown(), }).default(DEFAULT_CONFIG.markdown), }).messages({ 'docusaurus.configValidationWarning': diff --git a/packages/docusaurus/src/webpack/__tests__/__snapshots__/base.test.ts.snap b/packages/docusaurus/src/webpack/__tests__/__snapshots__/base.test.ts.snap index 9567720d58..7299deaf97 100644 --- a/packages/docusaurus/src/webpack/__tests__/__snapshots__/base.test.ts.snap +++ b/packages/docusaurus/src/webpack/__tests__/__snapshots__/base.test.ts.snap @@ -16,6 +16,7 @@ exports[`base webpack config creates webpack aliases 1`] = ` "@docusaurus/renderRoutes": "../../../../client/exports/renderRoutes.ts", "@docusaurus/router": "../../../../client/exports/router.ts", "@docusaurus/useBaseUrl": "../../../../client/exports/useBaseUrl.ts", + "@docusaurus/useBrokenLinks": "../../../../client/exports/useBrokenLinks.ts", "@docusaurus/useDocusaurusContext": "../../../../client/exports/useDocusaurusContext.ts", "@docusaurus/useGlobalData": "../../../../client/exports/useGlobalData.ts", "@docusaurus/useIsBrowser": "../../../../client/exports/useIsBrowser.ts", diff --git a/packages/docusaurus/src/webpack/aliases/__tests__/__snapshots__/index.test.ts.snap b/packages/docusaurus/src/webpack/aliases/__tests__/__snapshots__/index.test.ts.snap index c9738c847d..46390d21c9 100644 --- a/packages/docusaurus/src/webpack/aliases/__tests__/__snapshots__/index.test.ts.snap +++ b/packages/docusaurus/src/webpack/aliases/__tests__/__snapshots__/index.test.ts.snap @@ -16,6 +16,7 @@ exports[`getDocusaurusAliases returns appropriate webpack aliases 1`] = ` "@docusaurus/renderRoutes": " /packages/docusaurus/src/client/exports/renderRoutes.ts", "@docusaurus/router": " /packages/docusaurus/src/client/exports/router.ts", "@docusaurus/useBaseUrl": " /packages/docusaurus/src/client/exports/useBaseUrl.ts", + "@docusaurus/useBrokenLinks": " /packages/docusaurus/src/client/exports/useBrokenLinks.ts", "@docusaurus/useDocusaurusContext": " /packages/docusaurus/src/client/exports/useDocusaurusContext.ts", "@docusaurus/useGlobalData": " /packages/docusaurus/src/client/exports/useGlobalData.ts", "@docusaurus/useIsBrowser": " /packages/docusaurus/src/client/exports/useIsBrowser.ts", diff --git a/packages/eslint-plugin/package.json b/packages/eslint-plugin/package.json index e783475161..1d1c73d421 100644 --- a/packages/eslint-plugin/package.json +++ b/packages/eslint-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@docusaurus/eslint-plugin", - "version": "3.0.1", + "version": "3.1.0", "description": "ESLint plugin to enforce best Docusaurus practices.", "main": "lib/index.js", "keywords": [ diff --git a/packages/lqip-loader/package.json b/packages/lqip-loader/package.json index 1972966052..c7f6a59c22 100644 --- a/packages/lqip-loader/package.json +++ b/packages/lqip-loader/package.json @@ -1,6 +1,6 @@ { "name": "@docusaurus/lqip-loader", - "version": "3.0.1", + "version": "3.1.0", "description": "Low Quality Image Placeholders (LQIP) loader for webpack.", "main": "lib/index.js", "publishConfig": { @@ -17,7 +17,7 @@ }, "license": "MIT", "dependencies": { - "@docusaurus/logger": "3.0.1", + "@docusaurus/logger": "3.1.0", "file-loader": "^6.2.0", "lodash": "^4.17.21", "sharp": "^0.32.3", diff --git a/packages/stylelint-copyright/package.json b/packages/stylelint-copyright/package.json index d6db8b8067..3113e41560 100644 --- a/packages/stylelint-copyright/package.json +++ 
b/packages/stylelint-copyright/package.json @@ -1,6 +1,6 @@ { "name": "stylelint-copyright", - "version": "3.0.1", + "version": "3.1.0", "description": "Stylelint plugin to check CSS files for a copyright header.", "main": "lib/index.js", "license": "MIT", diff --git a/project-words.txt b/project-words.txt index 203bcb7c0f..51136e9283 100644 --- a/project-words.txt +++ b/project-words.txt @@ -1,346 +1,323 @@ +# Project Words - DO NOT TOUCH - This is updated through CI abernathyca -adriaan -agan +Adriaan +Agan alexbdebrie -alexey +Alexey algoliasearch -anonymized +Anshul anshul août +APFS apfs -apos appinstalled -applanga +Applanga architecting -astro +Astro atrule -autoconverted +Autoconverted +Autogen autogen autogenerating autohide -autolinks -backport -backticks -bartosz +Autolinks +Bartosz beforeinstallprompt -bhatt -blocklist +Bhatt +Blockquotes blockquotes Bokmål -browserslist browserstack +Buble buble -builtins +Buble's bunx caabernathy cacheable callouts -callstack +Callstack camelcase -candillon +Candillon cdabcdab cdpath +Cena cena +Changefreq changefreq +Chedeau chedeau -cheng -clément -clsx -codegen -codeql +Clément codesandbox -codespaces +Codespaces commonmark contravariance corejs -crawlable creativecommons -csapo -cssnano -csvg +Csapo +Csvg +Customizability customizability +Dabit dabit -daishi +Daishi +Datagit datagit -datamap -datas -dbaeumer -décembre +Datagit's dedup -deduplicated -déja -deps -devcontainers -devs -devspace devto -dmitry -docgen +Dmitry docsearch -docsify +Docsify +Docu docu docusuarus -docz -doesn +Docz +Dogfood dogfood +Dogfooding dogfooding -dojocat +Dojocat +Dyte dyte +Déja easyops +Endi endi +Endi's +Endilie endilie endiliey -entrypoints -errnametoolong -esbenp -esbuild -eslintcache -estree +ERRNAMETOOLONG evaluable +Execa execa externalwaiting failfast Fargate -fbid -février -fienny +FBID +Fienny flac -flightcontrol -formik -fouc +Flightcontrol +Flightcontrol's +Formik +FOUC froms funboxteam +février gabrielcsapo -gantt -getopts -gifs -gitgraph -gitpod -globbing -globby -goss -goyal -gruntfuggly +Gifs +Goss +Goyal +Gtag gtag hahaha -hamel -hardcoding -hastscript +Hamel +Hasura hasura -heavener -héctor -héllô -heuristical +Heavener +Hideable hideable hola -horiz -hostman +Hostman hoverable -husain -ianad +Husain +Héctor +héllô +IANAD idempotency Iframes -immer +Immer +Infima infima +Infima's inlines -intelli +Intelli intellij interactiveness +Interpolatable interpolatable -investec -jakepartusch -jamstack +Investec janvier javadoc jiti jmarcey jodyheavener joshcena -jscodeshift jssdk juillet -kaszubowski +Kaszubowski +Katex katex -kato -keyscan +Kato +Keytar keytar -kinsta -knapen -koyeb -kubernetes -lamana +Kinsta +Knapen +Koyeb +Koyeb's +Lamana +Lifecycles lifecycles -lighthouserc +Linkify linkify -localizable +Localizable lockb -longpaths -lorber -lowercased +Lorber +Lorber's +LQIP lqip lunrjs -mapbox -marcey +Marcey +Marcey's +Markprompt markprompt -marocchino -massoud +Massoud mathjax maxlynch maxresdefault +MDAST mdast mdwn -mdxa -mdxast -mdxhast +MDXA +MDXAST +MDXHAST +Mdxjs mdxjs metadatum metastring metrica -metrika +Metrika +Microdata microdata -microlink -middlewares +Mindmap mindmap -minifier -mkcert -mkdir -mkdirs mkdn mkdocs mkdown +Moesif moesif msapplication -nabors -nakagawa +Nabors +Nakagawa nand +Navigations navigations navlink netrc -nextra +Nextra ngryman -nisarag +Nisarag noflash noicon -noindex nojekyll noninteractive -noreply npmjs -npmrc nprogress -ntfs -nuxt -o’shannessy -onboarded -openapi +Nuxt opensearch 
opensearchdescription opensource optimizt -optind +Orta orta +Outerbounds outerbounds overrideable +O’Shannessy pageview +Palenight palenight -paletton -palo +Paletton +Palo +Paraiso paraiso pathinfo -pathnames paularmstrong -pbcopy -pcss peaceiris philpl +Photoshop photoshop -picocolors picomatch Pipeable playbtn +Pluggable pluggable +Plushie plushie plushies -pnpm posthog -preactjs +Precache precache precached precaching preconfigured -preconnect -prefetch -prefetching -preloads -prepended -preprocess -preprocessors prerendered prerendering +printfn println prismjs producthunt +Profilo profilo -protobuf +Protobuf protobuffet -prpl -pyltsyn -qjpuv -qovery +PRPL +Pyltsyn +QJPUV +Qovery quasis -quddus -quddús +Quddus +Quddús +Quickwit quickwit -quotify rachelnabors -ramón +Ramón reactjs rearchitecture recrawl redirections -redoc +Redoc redocusaurus redwoodjs refactorings -regexes +Rehype rehype renderable -reponame -reqs -requireindex +REPONAME +Retrocompatibility retrocompatibility +Retrocompatible retrocompatible rmiz -roadmap -rocketvalidator rtcts rtlcss saurus -scaleway +Scaleway searchbar +Sebastien sebastien -sébastien sebastienlorber sensical -serializers setaf setext +setlocal +Shiki shiki shortcodes showinfo -sida -simen +Sida +Simen slorber sluggified sluggifies sluggify +Solana solana spâce stackblitz stackblitzrc -strikethrough +Strikethrough strikethroughs -styl stylelint stylelintrc subdir @@ -354,71 +331,68 @@ subsetting subsubcategory subsubfolder subsubsection +Subsubsubfolder subsubsubfolder -sucipto +Sucipto sunsetting +Supabase supabase +SVGR svgr swizzlable -teik +Sébastien +Teik templating -thanos -therox +Thanos +Therox toolset toplevel -transifex +Transifex transpiles +Treeified treeified treeifies treeify -treosh +Triaging triaging -tses +TSES twoslash typecheck -typechecks -typedoc +Typesense typesense -unavatar -unflat -unist +Unavatar unlinkable +Unlisteds unlisteds +Unlocalized unlocalized -unmatch unnormalized -unoptimized -unprefixed unswizzle -unversioned upvotes urlset -userland -vannicatte -vercel -verifymethod +Vannicatte +vbnet +Vetter vetter vfile -vicenti -vieira -viet -viewports -vinnik +Vicenti +Vieira +Viet +Vinnik vjeux waivable +WCAG wcag webfactory -webp webpackbar webstorm -wolcott -writeups -xclip -xplorer -xsoar -yacop +Wolcott +Xplorer +XSOAR +Yacop +Yangshun yangshun yangshunz -zhou +Zhou zoomable zpao diff --git a/website/_dogfooding/_docs tests/tests/visibility/force-unlisted.mdx b/website/_dogfooding/_docs tests/tests/visibility/force-unlisted.mdx new file mode 100644 index 0000000000..0801898442 --- /dev/null +++ b/website/_dogfooding/_docs tests/tests/visibility/force-unlisted.mdx @@ -0,0 +1,10 @@ +--- +unlisted: false +force_unlisted_parseFrontMatter_test: true +--- + +# force_unlisted_parseFrontMatter_test + +This doc is hidden despite `unlisted: false` + +We use `parseFrontMatter` to force it to true thanks to `force_unlisted_parseFrontMatter_test: true` diff --git a/website/_dogfooding/_docs tests/tests/visibility/index.mdx b/website/_dogfooding/_docs tests/tests/visibility/index.mdx index 88a78b5d6b..71c3712f2d 100644 --- a/website/_dogfooding/_docs tests/tests/visibility/index.mdx +++ b/website/_dogfooding/_docs tests/tests/visibility/index.mdx @@ -24,6 +24,7 @@ In production, unlisted items should remain accessible, but be hidden in the sid - [./some-unlisteds/unlisted1.md](./some-unlisteds/unlisted1.mdx) - [./some-unlisteds/unlisted2.md](./some-unlisteds/unlisted2.mdx) - 
[./some-unlisteds/unlisted-subcategory/unlisted3.md](./some-unlisteds/unlisted-subcategory/unlisted3.mdx) +- [./force-unlisted.mdx](./force-unlisted.mdx) --- diff --git a/website/_dogfooding/_pages tests/code-block-tests.mdx b/website/_dogfooding/_pages tests/code-block-tests.mdx index 3b778e7cea..27e751cfa6 100644 --- a/website/_dogfooding/_pages tests/code-block-tests.mdx +++ b/website/_dogfooding/_pages tests/code-block-tests.mdx @@ -384,6 +384,61 @@ y = times2(x); \end{document} ``` +```vbnet title="vbnet.vb" +Dim languages As New Set(Of String) From { + ' highlight-start + "C#", + "Visual Basic", + "F#", + ' highlight-end + "PowerShell", + ' highlight-next-line + "TypeScript" +} +``` + +```batch title="cmd.bat" +rem highlight-start +@echo off +setlocal +Rem highlight-end +ipconfig +REM highlight-next-line +echo Docusaurus is awesome +netstat +``` + +```fortran title="fortran.f90" +! highlight-start +program hello +! highlight-end + implicit none + ! highlight-next-line + print *, "Hello, World!" +end program hello +``` + +```cobol title="cobol.cob" +*> highlight-start +IDENTIFICATION DIVISION. +PROGRAM-ID. HELLO. +*> highlight-end +PROCEDURE DIVISION. + *> highlight-next-line + DISPLAY "Hello, World!". +END PROGRAM HELLO. +``` + +```fsharp title="fsharp.fsx" +(* highlight-start *) +[<EntryPoint>] +(* highlight-end *) +let main _ = + // highlight-next-line + printfn "Hello, World!" + 0 +``` + ## HTML - script + style highlighting See https://github.com/facebook/docusaurus/issues/9517 @@ -403,3 +458,33 @@