Merge remote-tracking branch 'origin/slorber/try-to-optimize-git-calls' into slorber/try-to-optimize-git-calls

sebastien 2025-10-31 10:43:19 +01:00
commit 5c60909000
614 changed files with 44760 additions and 12614 deletions

View File

@ -21,6 +21,7 @@
],
"ignorePaths": [
"CHANGELOG.md",
"CHANGELOG-v*.md",
"patches",
"packages/docusaurus-theme-translations/locales",
"packages/docusaurus-plugin-ideal-image/src/theme/IdealImageLegacy",

.eslintrc.js vendored
View File

@ -214,7 +214,7 @@ module.exports = {
],
'no-useless-escape': WARNING,
'no-void': [ERROR, {allowAsStatement: true}],
'prefer-destructuring': WARNING,
'prefer-destructuring': OFF,
'prefer-named-capture-group': WARNING,
'prefer-template': WARNING,
yoda: WARNING,
@ -304,7 +304,7 @@ module.exports = {
'jest/prefer-expect-resolves': WARNING,
'jest/prefer-lowercase-title': [WARNING, {ignore: ['describe']}],
'jest/prefer-spy-on': WARNING,
'jest/prefer-to-be': WARNING,
'jest/prefer-to-be': OFF,
'jest/prefer-to-have-length': WARNING,
'jest/require-top-level-describe': ERROR,
'jest/valid-title': [

View File

@ -27,10 +27,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repository code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Use Node.js
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn

View File

@ -22,9 +22,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn

View File

@ -25,9 +25,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn
@ -43,7 +43,7 @@ jobs:
# BASE_URL: '/docusaurus/' # hash router +
- name: Upload Website artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v5
with:
name: website-hash-router-archive
path: website/build

View File

@ -41,9 +41,9 @@ jobs:
DOCUSAURUS_INFRA: ['SLOWER', 'FASTER']
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn
@ -73,9 +73,9 @@ jobs:
DOCUSAURUS_INFRA: ['SLOWER', 'FASTER']
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn

View File

@ -20,11 +20,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0 # Needed to get the commit number with "git rev-list --count HEAD"
- name: Set up Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn

View File

@ -33,12 +33,12 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@2c779ab0d087cd7fe7b826087247c2c81f27bfa6 # 3.26.5
uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # 4.31.0
with:
languages: ${{ matrix.language }}
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@2c779ab0d087cd7fe7b826087247c2c81f27bfa6 # 3.26.5
uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # 4.31.0

View File

@ -18,10 +18,10 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn

View File

@ -13,6 +13,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Dependency Review
uses: actions/dependency-review-action@da24556b548a50705dd671f47852072ea4c105d9 # 4.7.1
uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # 4.8.1

View File

@ -1,7 +1,7 @@
name: Lighthouse Report
on:
pull_request_target:
pull_request:
branches:
- main
- docusaurus-v**
@ -21,10 +21,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Use Node.js
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn
@ -37,7 +37,7 @@ jobs:
- name: Audit URLs using Lighthouse
id: lighthouse_audit
uses: treosh/lighthouse-ci-action@2f8dda6cf4de7d73b29853c3f29e73a01e297bd8 # 12.1.0
uses: treosh/lighthouse-ci-action@fcd65974f7c4c2bf0ee9d09b84d2489183c29726 # 12.6.1
with:
urls: |
http://localhost:3000
@ -53,7 +53,7 @@ jobs:
- name: Format lighthouse score
id: format_lighthouse_score
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # 7.0.1
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # 8.0.0
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
@ -65,7 +65,7 @@ jobs:
- name: Add Lighthouse stats as comment
id: comment_to_pr
uses: marocchino/sticky-pull-request-comment@67d0dec7b07ed060a405f9b2a64b8ab319fdd7db # 2.9.2
uses: marocchino/sticky-pull-request-comment@773744901bac0e8cbb5a0dc842800d45e9b2b405 # 2.9.4
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
number: ${{ github.event.pull_request.number }}

View File

@ -19,7 +19,7 @@ jobs:
contents: write
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
ref: ${{ github.head_ref }}
@ -42,6 +42,6 @@ jobs:
- name: Print Diff
run: git diff
- uses: stefanzweifel/git-auto-commit-action@v5
- uses: stefanzweifel/git-auto-commit-action@v7
with:
commit_message: 'refactor: apply lint autofix'

View File

@ -20,9 +20,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn

View File

@ -22,9 +22,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn

View File

@ -38,12 +38,12 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
node: ['18.0', '20', '22', '24']
node: ['20.0', '20', '22', '24', '25']
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: ${{ matrix.node }}
cache: yarn
@ -72,6 +72,48 @@ jobs:
DOCUSAURUS_PERF_LOGGER: 'true'
working-directory: ../test-website
yarn-v1-windows:
name: E2E — Yarn v1 Windows
timeout-minutes: 30
runs-on: windows-8-core
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Use Node.js LTS
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn
- name: Installation
run: yarn || yarn || yarn
- name: Generate test-website project against main branch
# Not using test-release.sh => no verdaccio docker image on Windows
# run: bash ./admin/scripts/test-release.sh -s
run: yarn create-docusaurus test-website-in-workspace classic --typescript
- name: Install test-website project with Yarn v1
run: yarn || yarn || yarn
working-directory: test-website-in-workspace
- name: Start test-website project
run: yarn start --no-open
working-directory: test-website-in-workspace
env:
E2E_TEST: true
- name: Build test-website project
# We build 2 locales to ensure a localized site doesn't leak memory
# See https://github.com/facebook/docusaurus/pull/10599
run: yarn build --locale en --locale fr
env:
# Our website should build even with limited memory
# See https://github.com/facebook/docusaurus/pull/10590
NODE_OPTIONS: '--max-old-space-size=300'
DOCUSAURUS_PERF_LOGGER: 'true'
working-directory: test-website-in-workspace
- name: Upload Website artifact
uses: actions/upload-artifact@v5
with:
name: website-e2e-windows
path: test-website-in-workspace/build
yarn-berry:
name: E2E — Yarn Berry
timeout-minutes: 30
@ -82,9 +124,9 @@ jobs:
variant: [-s, -st]
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Use Node.js LTS
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn
@ -151,9 +193,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Use Node.js LTS
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn
@ -191,9 +233,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Use Node.js LTS
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn

View File

@ -26,9 +26,9 @@ jobs:
variant: ['js', 'ts']
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Node LTS
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: lts/*
cache: yarn

View File

@ -27,14 +27,14 @@ jobs:
runs-on: windows-latest
strategy:
matrix:
node: ['18.0', '20', '22', '24']
node: ['20.0', '20', '22', '24', '25']
steps:
- name: Support longpaths
run: git config --system core.longpaths true
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: ${{ matrix.node }}
cache: yarn
@ -56,12 +56,16 @@ jobs:
run: yarn build:website:fast
- name: TypeCheck website
# TODO temporary, remove TS skipLibCheck
# see https://github.com/facebook/docusaurus/pull/10486
run: yarn workspace website typecheck --project tsconfig.skipLibCheck.json
run: yarn workspace website typecheck
- name: TypeCheck website - min version - v5.1
run: |
yarn add typescript@5.1.6 --exact -D -W --ignore-scripts
# DocSearch@4/ai@5 doesn't support TS 5.1 (with skipLibCheck=false)
jq '.resolutions."@docsearch/react" = "^3.9.0"' package.json > package.json.tmp && mv -Force package.json.tmp package.json
yarn add @docsearch/react@^3.9.0 --exact -D -W --ignore-scripts
yarn workspace website typecheck
- name: TypeCheck website - max version - Latest
# For latest TS there are often lib check errors, so we disable it

View File

@ -27,12 +27,12 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
node: ['18.0', '20', '22', '24']
node: ['20.0', '20', '22', '24', '25']
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: ${{ matrix.node }}
cache: yarn
@ -55,12 +55,16 @@ jobs:
run: yarn workspace website test:css-order
- name: TypeCheck website
# TODO temporary, remove TS skipLibCheck
# see https://github.com/facebook/docusaurus/pull/10486
run: yarn workspace website typecheck --project tsconfig.skipLibCheck.json
run: yarn workspace website typecheck
- name: TypeCheck website - min version - v5.1
run: |
yarn add typescript@5.1.6 --exact -D -W --ignore-scripts
# DocSearch@4/ai@5 doesn't support TS 5.1 (with skipLibCheck=false)
jq '.resolutions."@docsearch/react" = "^3.9.0"' package.json > package.json.tmp && mv -f package.json.tmp package.json
yarn add @docsearch/react@^3.9.0 --exact -D -W --ignore-scripts
yarn workspace website typecheck
- name: TypeCheck website - max version - Latest
# For latest TS there are often lib check errors, so we disable it

.gitignore vendored
View File

@ -43,6 +43,7 @@ website/.cpu-prof
.netlify
website/rspack-tracing.json
website/rspack-tracing.pftrace
website/bundler-cpu-profile.json
website/profile.json.gz

CHANGELOG-v2.md Normal file

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -82,7 +82,7 @@ Apart from the `good first issue`, the following labels are also worth looking a
- [`help wanted`](https://github.com/facebook/docusaurus/labels/help%20wanted): if you have specific knowledge in one domain, working on these issues can make your expertise shine.
- [`status: accepting pr`](https://github.com/facebook/docusaurus/labels/status%3A%20accepting%20pr): community contributors can feel free to claim any of these.
If you want to work on any of these issues, just drop a message saying "I'd like to work on this", and we will assign the issue to you and update the issue's status as "claimed". **You are expected to send a pull request within seven days** after that, so we can still delegate the issue to someone else if you are unavailable.
If you want to work on any of these issues, just drop a message saying "I am working on this". **You do not need to ask for assignment to work on any issue explicitly marked as welcoming external contributions.** However, don't "cookie lick", or squat on an issue without actually sending a PR. You are automatically considered as giving up if you don't **send a PR within seven days after your comment**, and the issue automatically becomes up for grabs again.
Alternatively, when opening an issue, you can also click the "self service" checkbox to indicate that you'd like to work on the issue yourself, which will also make us see the issue as "claimed".
@ -214,6 +214,16 @@ After you have signed the CLA, the CLA bot would automatically update the PR sta
If it happens that you were unavailable and your PR gets closed, feel free to reopen once it's ready! We are still happy to review it, help you complete it, and eventually merge it.
### AI-assisted PRs
We welcome the use of AI tools for authoring PRs, and we love to see people pushing the boundaries of AI capabilities. The core team actively uses different AI tools in our development process. However, we are aware that **many people are sending entirely AI-generated PRs as a low-effort way to farm OSS contributions**, so please be mindful of the following etiquette to show your respect for our time and our codebase:
- **Be transparent**: If a significant portion of your code is AI generated, please indicate that in your PR description.
- **Be accountable**: You are responsible for the code you submit, regardless of whether it was generated by AI or written by you. You should be able to explain every line of the code, ensure all tests pass, and address our reviews.
- **Be reasonable**: Sometimes we receive 1k LOC PRs that are obviously AI-generated and implement unsolicited features. Please note that significant changes require prior communication and approval from the team in the form of an issue.
We retain the right to close any PR that we deem as unproductive or low-effort, even when we agree with the spirit of the change.
### Breaking Changes
When adding a new breaking change, follow this template in your pull request:
@ -227,6 +237,10 @@ When adding a new breaking change, follow this template in your pull request:
- **Severity (number of people affected x effort)**:
```
> [!NOTE]
>
> Breaking changes should be discussed in the issue tracker before being implemented.
### What Happens Next?
The core Docusaurus team will be monitoring pull requests. Do help us by keeping pull requests consistent by following the guidelines above.

View File

@ -120,4 +120,4 @@ The Docusaurus documentation (e.g., `.md` files in the `/docs` folder) is [Creat
[![Rocket Validator logo](./admin/img/rocketvalidator-logo.png)](https://rocketvalidator.com/)
[Rocket Validator](https://rocketvalidator.com/) helps us find HTML markup or accessibility issues.
[Rocket Validator](https://rocketvalidator.com/) helps us find [HTML markup and accessibility issues](https://rocketvalidator.com/stats/docusaurus.io).

View File

@ -1,6 +1,6 @@
{
"name": "new.docusaurus.io",
"version": "3.8.0",
"version": "3.9.2",
"private": true,
"scripts": {
"start": "npx --package netlify-cli netlify dev"

View File

@ -53,6 +53,8 @@ git diff --name-only -- '*.json' | sed 's, ,\\&,g' | xargs git checkout --
# The website is generated outside the repo to minimize chances of yarn resolving the wrong version
cd ..
echo Generating test-website in `pwd`
# Build skeleton website with new version
npm_config_registry="$CUSTOM_REGISTRY_URL" npx --yes --loglevel silly create-docusaurus@"$NEW_VERSION" test-website classic --javascript $EXTRA_OPTS

View File

@ -1,6 +1,6 @@
{
"name": "test-bad-package",
"version": "3.8.0",
"version": "3.9.2",
"private": true,
"dependencies": {
"@mdx-js/react": "1.0.1",

View File

@ -1,6 +1,6 @@
{
"name": "argos",
"version": "3.8.0",
"version": "3.9.2",
"description": "Argos visual diff tests",
"license": "MIT",
"private": true,

View File

@ -134,11 +134,6 @@ function throwOnConsole(page: Page) {
// it's already happening in main branch
'Failed to load resource: the server responded with a status of 404 (Not Found)',
// TODO legit hydration bugs to fix on embeds of /docs/styling-layout
// useLocation() returns window.search/hash immediately :s
'/docs/configuration?docusaurus-theme=light',
'/docs/configuration?docusaurus-theme=dark',
// Warning because react-live not supporting React automatic JSX runtime
// See https://github.com/FormidableLabs/react-live/issues/405
'Your app (or one of its dependencies) is using an outdated JSX transform. Update to the modern JSX transform for faster performance',

View File

@ -20,6 +20,19 @@ languages_mapping: &languages_mapping
two_letters_code:
pt-BR: pt-BR
# Crowdin regularly update their MDX parser
# Unfortunately, their v2 parser is more "MDX compliant" and thus can't parse
# Docusaurus MDX files correctly due to our custom {#headingId} syntax.
# Adding this type param permits using their older v1.2 parser.
# Note: you can find the version of a file using browser DevTools
# The source file icons will have a class such as "file_type_mdx_v1_2"
#
# TODO fix our headingId syntax
# providing an explicit type is annoying and not future-proof
# there's a risk that when adding an image in /docs, it will be parsed as mdx
# and duplicating source file configs for various extensions is not great either
mdx_file_type: &mdx_file_type mdx_v1_2
#
# Files configuration
#
@ -27,18 +40,33 @@ files:
- source: /website/i18n/en/**/*
translation: /website/i18n/%two_letters_code%/**/%original_file_name%
languages_mapping: *languages_mapping
- source: /website/docs/**/*.mdx
translation: /website/i18n/%two_letters_code%/docusaurus-plugin-content-docs/current/**/%original_file_name%
languages_mapping: *languages_mapping
type: *mdx_file_type
- source: /website/docs/**/*
translation: /website/i18n/%two_letters_code%/docusaurus-plugin-content-docs/current/**/%original_file_name%
languages_mapping: *languages_mapping
- source: /website/community/**/*
translation: /website/i18n/%two_letters_code%/docusaurus-plugin-content-docs-community/current/**/%original_file_name%
ignore: [/**/*.mdx]
- source: /website/versioned_docs/**/*.mdx
translation: /website/i18n/%two_letters_code%/docusaurus-plugin-content-docs/**/%original_file_name%
languages_mapping: *languages_mapping
type: *mdx_file_type
- source: /website/versioned_docs/**/*
translation: /website/i18n/%two_letters_code%/docusaurus-plugin-content-docs/**/%original_file_name%
languages_mapping: *languages_mapping
ignore: [/**/*.mdx]
- source: /website/community/**/*
translation: /website/i18n/%two_letters_code%/docusaurus-plugin-content-docs-community/current/**/%original_file_name%
languages_mapping: *languages_mapping
- source: /website/blog/**/*
translation: /website/i18n/%two_letters_code%/docusaurus-plugin-content-blog/**/%original_file_name%
languages_mapping: *languages_mapping
- source: /website/src/pages/**/*
translation: /website/i18n/%two_letters_code%/docusaurus-plugin-content-pages/**/%original_file_name%
ignore: [/**/*.js, /**/*.jsx, /**/*.ts, /**/*.tsx, /**/*.css]

View File

@ -14,7 +14,7 @@ Or **try Docusaurus immediately** with **[docusaurus.new](https://docusaurus.new
### What you'll need
- [Node.js](https://nodejs.org/en/download/) version 18.0 or above:
- [Node.js](https://nodejs.org/en/download/) version 20.0 or above:
- When installing Node.js, you are recommended to check all checkboxes related to dependencies.
## Generate a new site

View File

@ -26,7 +26,6 @@ const config: Config = {
projectName: 'docusaurus', // Usually your repo name.
onBrokenLinks: 'throw',
onBrokenMarkdownLinks: 'warn',
// Even if you don't use internationalization, you can use this field to set
// useful metadata like html lang. For example, if your site is Chinese, you
@ -72,6 +71,9 @@ const config: Config = {
themeConfig: {
// Replace with your project's social card
image: 'img/docusaurus-social-card.jpg',
colorMode: {
respectPrefersColorScheme: true,
},
navbar: {
title: 'My Site',
logo: {

View File

@ -16,8 +16,8 @@
"dev": "docusaurus start"
},
"dependencies": {
"@docusaurus/core": "3.8.0",
"@docusaurus/preset-classic": "3.8.0",
"@docusaurus/core": "3.9.2",
"@docusaurus/preset-classic": "3.9.2",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@ -25,9 +25,9 @@
"react-dom": "^19.0.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.8.0",
"@docusaurus/tsconfig": "3.8.0",
"@docusaurus/types": "3.8.0",
"@docusaurus/module-type-aliases": "3.9.2",
"@docusaurus/tsconfig": "3.9.2",
"@docusaurus/types": "3.9.2",
"typescript": "~5.6.2"
},
"browserslist": {
@ -43,7 +43,7 @@
]
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
},
"description": "Docusaurus example project (classic-typescript template)"
}

File diff suppressed because it is too large

View File

@ -14,7 +14,7 @@ Or **try Docusaurus immediately** with **[docusaurus.new](https://docusaurus.new
### What you'll need
- [Node.js](https://nodejs.org/en/download/) version 18.0 or above:
- [Node.js](https://nodejs.org/en/download/) version 20.0 or above:
- When installing Node.js, you are recommended to check all checkboxes related to dependencies.
## Generate a new site

View File

@ -31,7 +31,6 @@ const config = {
projectName: 'docusaurus', // Usually your repo name.
onBrokenLinks: 'throw',
onBrokenMarkdownLinks: 'warn',
// Even if you don't use internationalization, you can use this field to set
// useful metadata like html lang. For example, if your site is Chinese, you
@ -80,6 +79,9 @@ const config = {
({
// Replace with your project's social card
image: 'img/docusaurus-social-card.jpg',
colorMode: {
respectPrefersColorScheme: true,
},
navbar: {
title: 'My Site',
logo: {

View File

@ -15,8 +15,8 @@
"dev": "docusaurus start"
},
"dependencies": {
"@docusaurus/core": "3.8.0",
"@docusaurus/preset-classic": "3.8.0",
"@docusaurus/core": "3.9.2",
"@docusaurus/preset-classic": "3.9.2",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@ -24,8 +24,8 @@
"react-dom": "^19.0.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.8.0",
"@docusaurus/types": "3.8.0"
"@docusaurus/module-type-aliases": "3.9.2",
"@docusaurus/types": "3.9.2"
},
"browserslist": {
"production": [
@ -40,7 +40,7 @@
]
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
},
"description": "Docusaurus example project"
}

File diff suppressed because it is too large

jest.config.mjs vendored
View File

@ -32,15 +32,16 @@ const ignorePatterns = [
export default {
rootDir: fileURLToPath(new URL('.', import.meta.url)),
verbose: true,
// Default 5s timeout often fails on Windows :s,
// see https://github.com/facebook/docusaurus/pull/8259
testTimeout: 15000,
setupFiles: ['./jest/setup.js'],
testEnvironmentOptions: {
url: 'https://docusaurus.io/',
},
testEnvironment: 'node',
testPathIgnorePatterns: ignorePatterns,
// Default 5s timeout often fails on Windows :s,
// see https://github.com/facebook/docusaurus/pull/8259
testTimeout: 15000,
watchPathIgnorePatterns: ['/\\.docusaurus'],
coveragePathIgnorePatterns: [
...ignorePatterns,
// We also ignore all package entry points

View File

@ -1,5 +1,5 @@
{
"version": "3.8.0",
"version": "3.9.2",
"npmClient": "yarn",
"useWorkspaces": true,
"useNx": false,

View File

@ -1,6 +1,6 @@
{
"name": "create-docusaurus",
"version": "3.8.0",
"version": "3.9.2",
"description": "Create Docusaurus apps easily.",
"type": "module",
"repository": {
@ -22,8 +22,8 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.8.0",
"@docusaurus/utils": "3.8.0",
"@docusaurus/logger": "3.9.2",
"@docusaurus/utils": "3.9.2",
"commander": "^5.1.0",
"execa": "5.1.1",
"fs-extra": "^11.1.1",
@ -37,6 +37,6 @@
"@types/supports-color": "^8.1.1"
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
}
}

View File

@ -26,7 +26,6 @@ const config: Config = {
projectName: 'docusaurus', // Usually your repo name.
onBrokenLinks: 'throw',
onBrokenMarkdownLinks: 'warn',
// Even if you don't use internationalization, you can use this field to set
// useful metadata like html lang. For example, if your site is Chinese, you
@ -72,6 +71,9 @@ const config: Config = {
themeConfig: {
// Replace with your project's social card
image: 'img/docusaurus-social-card.jpg',
colorMode: {
respectPrefersColorScheme: true,
},
navbar: {
title: 'My Site',
logo: {

View File

@ -1,6 +1,6 @@
{
"name": "docusaurus-2-classic-typescript-template",
"version": "3.8.0",
"version": "3.9.2",
"private": true,
"scripts": {
"docusaurus": "docusaurus",
@ -15,8 +15,8 @@
"typecheck": "tsc"
},
"dependencies": {
"@docusaurus/core": "3.8.0",
"@docusaurus/preset-classic": "3.8.0",
"@docusaurus/core": "3.9.2",
"@docusaurus/preset-classic": "3.9.2",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@ -24,9 +24,9 @@
"react-dom": "^19.0.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.8.0",
"@docusaurus/tsconfig": "3.8.0",
"@docusaurus/types": "3.8.0",
"@docusaurus/module-type-aliases": "3.9.2",
"@docusaurus/tsconfig": "3.9.2",
"@docusaurus/types": "3.9.2",
"typescript": "~5.6.2"
},
"browserslist": {
@ -42,6 +42,6 @@
]
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
}
}

View File

@ -31,7 +31,6 @@ const config = {
projectName: 'docusaurus', // Usually your repo name.
onBrokenLinks: 'throw',
onBrokenMarkdownLinks: 'warn',
// Even if you don't use internationalization, you can use this field to set
// useful metadata like html lang. For example, if your site is Chinese, you
@ -80,6 +79,9 @@ const config = {
({
// Replace with your project's social card
image: 'img/docusaurus-social-card.jpg',
colorMode: {
respectPrefersColorScheme: true,
},
navbar: {
title: 'My Site',
logo: {

View File

@ -1,6 +1,6 @@
{
"name": "docusaurus-2-classic-template",
"version": "3.8.0",
"version": "3.9.2",
"private": true,
"scripts": {
"docusaurus": "docusaurus",
@ -14,8 +14,8 @@
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "3.8.0",
"@docusaurus/preset-classic": "3.8.0",
"@docusaurus/core": "3.9.2",
"@docusaurus/preset-classic": "3.9.2",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@ -23,8 +23,8 @@
"react-dom": "^19.0.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.8.0",
"@docusaurus/types": "3.8.0"
"@docusaurus/module-type-aliases": "3.9.2",
"@docusaurus/types": "3.9.2"
},
"browserslist": {
"production": [
@ -39,6 +39,6 @@
]
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
}
}

View File

@ -14,7 +14,7 @@ Or **try Docusaurus immediately** with **[docusaurus.new](https://docusaurus.new
### What you'll need
- [Node.js](https://nodejs.org/en/download/) version 18.0 or above:
- [Node.js](https://nodejs.org/en/download/) version 20.0 or above:
- When installing Node.js, you are recommended to check all checkboxes related to dependencies.
## Generate a new site

View File

@ -1,6 +1,6 @@
{
"name": "@docusaurus/babel",
"version": "3.8.0",
"version": "3.9.2",
"description": "Docusaurus package for Babel-related utils.",
"main": "./lib/index.js",
"types": "./lib/index.d.ts",
@ -38,13 +38,13 @@
"@babel/runtime": "^7.25.9",
"@babel/runtime-corejs3": "^7.25.9",
"@babel/traverse": "^7.25.9",
"@docusaurus/logger": "3.8.0",
"@docusaurus/utils": "3.8.0",
"@docusaurus/logger": "3.9.2",
"@docusaurus/utils": "3.9.2",
"babel-plugin-dynamic-import-node": "^2.3.3",
"fs-extra": "^11.1.1",
"tslib": "^2.6.0"
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
}
}

View File

@ -1,6 +1,6 @@
{
"name": "@docusaurus/bundler",
"version": "3.8.0",
"version": "3.9.2",
"description": "Docusaurus util package to abstract the current bundler.",
"main": "./lib/index.js",
"types": "./lib/index.d.ts",
@ -19,24 +19,24 @@
"license": "MIT",
"dependencies": {
"@babel/core": "^7.25.9",
"@docusaurus/babel": "3.8.0",
"@docusaurus/cssnano-preset": "3.8.0",
"@docusaurus/logger": "3.8.0",
"@docusaurus/types": "3.8.0",
"@docusaurus/utils": "3.8.0",
"@docusaurus/babel": "3.9.2",
"@docusaurus/cssnano-preset": "3.9.2",
"@docusaurus/logger": "3.9.2",
"@docusaurus/types": "3.9.2",
"@docusaurus/utils": "3.9.2",
"babel-loader": "^9.2.1",
"clean-css": "^5.3.2",
"clean-css": "^5.3.3",
"copy-webpack-plugin": "^11.0.0",
"css-loader": "^6.8.1",
"css-loader": "^6.11.0",
"css-minimizer-webpack-plugin": "^5.0.1",
"cssnano": "^6.1.2",
"file-loader": "^6.2.0",
"html-minifier-terser": "^7.2.0",
"mini-css-extract-plugin": "^2.9.1",
"mini-css-extract-plugin": "^2.9.2",
"null-loader": "^4.0.1",
"postcss": "^8.4.26",
"postcss-loader": "^7.3.3",
"postcss-preset-env": "^10.1.0",
"postcss": "^8.5.4",
"postcss-loader": "^7.3.4",
"postcss-preset-env": "^10.2.1",
"terser-webpack-plugin": "^5.3.9",
"tslib": "^2.6.0",
"url-loader": "^4.1.1",
@ -55,6 +55,6 @@
"@total-typescript/shoehorn": "^0.1.2"
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
}
}

View File

@ -129,8 +129,8 @@ export async function registerBundlerTracing({
await Rspack.experiments.globalTrace.register(
filter,
'chrome',
'./rspack-tracing.json',
'perfetto',
'./rspack-tracing.pftrace',
);
console.info(`Rspack tracing registered, filter=${filter}`);

View File

@ -28,7 +28,7 @@ async function createSwcJsLoaderFactory(): Promise<
return ({isServer}) => {
return {
loader,
options: getOptions({isServer}),
options: getOptions({isServer, bundlerName: 'webpack'}),
};
};
}
@ -42,7 +42,7 @@ async function createRspackSwcJsLoaderFactory(): Promise<
return ({isServer}) => {
return {
loader,
options: getOptions({isServer}),
options: getOptions({isServer, bundlerName: 'rspack'}),
};
};
}

View File

@ -142,7 +142,10 @@ async function getRspackMinimizers({
}: MinimizersConfig): Promise<WebpackPluginInstance[]> {
const rspack = getCurrentBundlerAsRspack({currentBundler});
const getBrowserslistQueries = await importGetBrowserslistQueries();
const browserslistQueries = getBrowserslistQueries({isServer: false});
const browserslistQueries = getBrowserslistQueries({
isServer: false,
bundlerName: 'rspack',
});
const swcJsMinimizerOptions = await importSwcJsMinimizerOptions();
return [
// See https://rspack.dev/plugins/rspack/swc-js-minimizer-rspack-plugin

View File

@ -84,6 +84,10 @@ async function getSwcMinifier(): Promise<HtmlMinifier> {
// TODO maybe it's fine to only keep <!-- --> React comments?
preserveComments: [],
// Keep <head> tag: important for social image crawlers like LinkedIn
// See https://github.com/swc-project/swc/issues/10994
tagOmission: 'keep-head-and-body',
// Sorting these attributes (class) can lead to React hydration errors
sortSpaceSeparatedAttributeValues: false,
sortAttributes: false,

View File

@ -1,6 +1,6 @@
{
"name": "@docusaurus/cssnano-preset",
"version": "3.8.0",
"version": "3.9.2",
"description": "Advanced cssnano preset for maximum optimization.",
"main": "lib/index.js",
"license": "MIT",
@ -18,7 +18,7 @@
},
"dependencies": {
"cssnano-preset-advanced": "^6.1.2",
"postcss": "^8.4.38",
"postcss": "^8.5.4",
"postcss-sort-media-queries": "^5.2.0",
"tslib": "^2.6.0"
},
@ -26,6 +26,6 @@
"to-vfile": "^6.1.0"
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
}
}

View File

@ -13,6 +13,9 @@ const preset: typeof advancedBasePreset = function preset(opts) {
const advancedPreset = advancedBasePreset({
autoprefixer: {add: false},
discardComments: {removeAll: true},
// See CodeBlock custom line number bug: https://github.com/facebook/docusaurus/pull/11487
/* cSpell:ignore Idents */
reduceIdents: {counter: false},
/* cSpell:ignore zindex */
zindex: false,
...opts,

View File

@ -1,6 +1,6 @@
{
"name": "@docusaurus/faster",
"version": "3.8.0",
"version": "3.9.2",
"description": "Docusaurus experimental package exposing new modern dependencies to make the build faster.",
"main": "./lib/index.js",
"types": "./lib/index.d.ts",
@ -18,18 +18,19 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/types": "3.8.0",
"@rspack/core": "^1.3.10",
"@docusaurus/types": "3.9.2",
"@rspack/core": "^1.5.0",
"@swc/core": "^1.7.39",
"@swc/html": "^1.7.39",
"@swc/html": "^1.13.5",
"browserslist": "^4.24.2",
"lightningcss": "^1.27.0",
"semver": "^7.5.4",
"swc-loader": "^0.2.6",
"tslib": "^2.6.0",
"webpack": "^5.95.0"
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
},
"peerDependencies": {
"@docusaurus/types": "*"

View File

@ -9,18 +9,22 @@ import Rspack from '@rspack/core';
import * as lightningcss from 'lightningcss';
import browserslist from 'browserslist';
import {minify as swcHtmlMinifier} from '@swc/html';
import semver from 'semver';
import type {JsMinifyOptions, Options as SwcOptions} from '@swc/core';
import type {CurrentBundler} from '@docusaurus/types';
export const swcLoader = require.resolve('swc-loader');
export const getSwcLoaderOptions = ({
isServer,
bundlerName,
}: {
isServer: boolean;
bundlerName: CurrentBundler['name'];
}): SwcOptions => {
return {
env: {
targets: getBrowserslistQueries({isServer}),
targets: getBrowserslistQueries({isServer, bundlerName}),
},
jsc: {
parser: {
@ -63,20 +67,53 @@ export function getSwcJsMinimizerOptions(): JsMinifyOptions {
};
}
// TODO this is not accurate
// for Rspack we should read from the built-in browserslist data
// see https://github.com/facebook/docusaurus/pull/11496
function getLastBrowserslistKnownNodeVersion(
bundlerName: CurrentBundler['name'],
): string {
if (bundlerName === 'rspack') {
// TODO hardcoded value until Rspack exposes its Browserslist data
// see https://github.com/facebook/docusaurus/pull/11496
return '22.0.0';
}
// browserslist('last 1 node versions')[0]!.replace('node ', '')
return browserslist.nodeVersions.at(-1)!;
}
function getMinVersion(v1: string, v2: string): string {
return semver.lt(v1, v2) ? v1 : v2;
}
// We need this because of Rspack built-in LightningCSS integration
// See https://github.com/orgs/browserslist/discussions/846
export function getBrowserslistQueries({
isServer,
bundlerName,
}: {
isServer: boolean;
bundlerName: CurrentBundler['name'];
}): string[] {
if (isServer) {
return [`node ${process.versions.node}`];
// Escape hatch env variable
if (process.env.DOCUSAURUS_SERVER_NODE_TARGET) {
return [`node ${process.env.DOCUSAURUS_SERVER_NODE_TARGET}`];
}
// For server builds, we want to use the current Node version as target
// But we can't pass a target that Browserslist doesn't know about yet
const nodeTarget = getMinVersion(
process.versions.node,
getLastBrowserslistKnownNodeVersion(bundlerName),
);
return [`node ${nodeTarget}`];
}
const queries = browserslist.loadConfig({path: process.cwd()}) ?? [
...browserslist.defaults,
];
return queries;
}
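For context, here is a minimal sketch of how the updated helper is meant to be called, based on the signatures shown above. The direct import from `@docusaurus/faster` is an assumption; inside the repo it is loaded dynamically via `importGetBrowserslistQueries`.

```ts
// Illustrative sketch only, not part of this commit.
import {getBrowserslistQueries} from '@docusaurus/faster'; // assumed entry point

// Client bundles keep using the site's browserslist config (or the defaults):
const clientTargets = getBrowserslistQueries({isServer: false, bundlerName: 'webpack'});

// Server bundles now target the current Node version, capped at the newest Node
// version known to Browserslist (hardcoded to 22.0.0 for Rspack for now):
const serverTargets = getBrowserslistQueries({isServer: true, bundlerName: 'rspack'});

// Escape hatch: DOCUSAURUS_SERVER_NODE_TARGET=20 overrides the server target entirely.
```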

View File

@ -1,6 +1,6 @@
{
"name": "@docusaurus/logger",
"version": "3.8.0",
"version": "3.9.2",
"description": "An encapsulated logger for semantically formatting console messages.",
"main": "./lib/index.js",
"repository": {
@ -24,7 +24,7 @@
"tslib": "^2.6.0"
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
},
"devDependencies": {
"@types/supports-color": "^8.1.1"

View File

@ -1,6 +1,6 @@
{
"name": "@docusaurus/mdx-loader",
"version": "3.8.0",
"version": "3.9.2",
"description": "Docusaurus Loader for MDX",
"main": "lib/index.js",
"types": "lib/index.d.ts",
@ -18,9 +18,9 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.8.0",
"@docusaurus/utils": "3.8.0",
"@docusaurus/utils-validation": "3.8.0",
"@docusaurus/logger": "3.9.2",
"@docusaurus/utils": "3.9.2",
"@docusaurus/utils-validation": "3.9.2",
"@mdx-js/mdx": "^3.0.0",
"@slorber/remark-comment": "^1.0.0",
"escape-html": "^1.0.3",
@ -44,7 +44,7 @@
"webpack": "^5.88.1"
},
"devDependencies": {
"@docusaurus/types": "3.8.0",
"@docusaurus/types": "3.9.2",
"@types/escape-html": "^1.0.2",
"@types/mdast": "^4.0.2",
"@types/stringify-object": "^3.3.1",
@ -62,6 +62,6 @@
"react-dom": "^18.0.0 || ^19.0.0"
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
}
}

View File

@ -22,6 +22,9 @@ import type {WebpackCompilerName} from '@docusaurus/utils';
import type {MDXFrontMatter} from './frontMatter';
import type {Options} from './options';
import type {AdmonitionOptions} from './remark/admonitions';
import type {PluginOptions as ResolveMarkdownLinksOptions} from './remark/resolveMarkdownLinks';
import type {PluginOptions as TransformLinksOptions} from './remark/transformLinks';
import type {PluginOptions as TransformImageOptions} from './remark/transformImage';
import type {ProcessorOptions} from '@mdx-js/mdx';
// TODO as of April 2023, no way to import/re-export this ESM type easily :/
@ -92,7 +95,7 @@ async function createProcessorFactory() {
headings,
{anchorsMaintainCase: options.markdownConfig.anchors.maintainCase},
],
emoji,
...(options.markdownConfig.emoji ? [emoji] : []),
toc,
];
}
@ -121,13 +124,19 @@ async function createProcessorFactory() {
{
staticDirs: options.staticDirs,
siteDir: options.siteDir,
},
onBrokenMarkdownImages:
options.markdownConfig.hooks.onBrokenMarkdownImages,
} satisfies TransformImageOptions,
],
// TODO merge this with transformLinks?
options.resolveMarkdownLink
? [
resolveMarkdownLinks,
{resolveMarkdownLink: options.resolveMarkdownLink},
{
resolveMarkdownLink: options.resolveMarkdownLink,
onBrokenMarkdownLinks:
options.markdownConfig.hooks.onBrokenMarkdownLinks,
} satisfies ResolveMarkdownLinksOptions,
]
: undefined,
[
@ -135,7 +144,9 @@ async function createProcessorFactory() {
{
staticDirs: options.staticDirs,
siteDir: options.siteDir,
},
onBrokenMarkdownLinks:
options.markdownConfig.hooks.onBrokenMarkdownLinks,
} satisfies TransformLinksOptions,
],
gfm,
options.markdownConfig.mdx1Compat.comments ? comment : null,
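For readers skimming this hunk: the `hooks` object threaded through here comes from the site configuration. A minimal sketch of what that might look like, inferred from the error messages and tests elsewhere in this commit (treat the exact shape as an assumption):

```ts
// docusaurus.config.ts — illustrative sketch only, not part of this commit.
import type {Config} from '@docusaurus/types';

const config: Config = {
  title: 'My Site',
  url: 'https://example.com',
  baseUrl: '/',
  markdown: {
    hooks: {
      // Severity string ('throw', 'warn', ...) or a recovery function,
      // as exercised by the resolveMarkdownLinks and transformImage tests in this commit.
      onBrokenMarkdownLinks: 'warn',
      onBrokenMarkdownImages: ({url, sourceFilePath}) => {
        console.warn(`Broken image ${url} in ${sourceFilePath}`);
        // Returning a string replaces the broken URL (mirrors the link test behavior).
        return '/img/fallback.png';
      },
    },
  },
};

export default config;
```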

View File

@ -5,22 +5,47 @@
* LICENSE file in the root directory of this source tree.
*/
import {jest} from '@jest/globals';
import * as path from 'path';
import plugin from '..';
import type {PluginOptions} from '../index';
async function process(content: string) {
const {remark} = await import('remark');
const siteDir = __dirname;
const options: PluginOptions = {
resolveMarkdownLink: ({linkPathname}) => `/RESOLVED---${linkPathname}`,
const DefaultTestOptions: PluginOptions = {
resolveMarkdownLink: ({linkPathname}) => `/RESOLVED---${linkPathname}`,
onBrokenMarkdownLinks: 'throw',
};
async function process(content: string, optionsInput?: Partial<PluginOptions>) {
const options = {
...DefaultTestOptions,
...optionsInput,
};
const result = await remark().use(plugin, options).process(content);
const {remark} = await import('remark');
const result = await remark()
.use(plugin, options)
.process({
value: content,
path: path.posix.join(siteDir, 'docs', 'myFile.mdx'),
});
return result.value;
}
describe('resolveMarkdownLinks remark plugin', () => {
it('accepts non-md link', async () => {
/* language=markdown */
const content = `[link1](link1)`;
const result = await process(content);
expect(result).toMatchInlineSnapshot(`
"[link1](link1)
"
`);
});
it('resolves Markdown and MDX links', async () => {
/* language=markdown */
const content = `[link1](link1.mdx)
@ -157,4 +182,212 @@ this is a code block
"
`);
});
describe('onBrokenMarkdownLinks', () => {
const warnMock = jest.spyOn(console, 'warn').mockImplementation(() => {});
beforeEach(() => {
warnMock.mockClear();
});
async function processResolutionErrors(
content: string,
onBrokenMarkdownLinks: PluginOptions['onBrokenMarkdownLinks'] = 'throw',
) {
return process(content, {
resolveMarkdownLink: () => null,
onBrokenMarkdownLinks,
});
}
describe('throws', () => {
it('for unresolvable mdx link', async () => {
/* language=markdown */
const content = `[link1](link1.mdx)`;
await expect(() => processResolutionErrors(content)).rejects
.toThrowErrorMatchingInlineSnapshot(`
"Markdown link with URL \`link1.mdx\` in source file "packages/docusaurus-mdx-loader/src/remark/resolveMarkdownLinks/__tests__/docs/myFile.mdx" (1:1) couldn't be resolved.
Make sure it references a local Markdown file that exists within the current plugin.
To ignore this error, use the \`siteConfig.markdown.hooks.onBrokenMarkdownLinks\` option, or apply the \`pathname://\` protocol to the broken link URLs."
`);
});
it('for unresolvable md link', async () => {
/* language=markdown */
const content = `[link1](link1.md)`;
await expect(() => processResolutionErrors(content)).rejects
.toThrowErrorMatchingInlineSnapshot(`
"Markdown link with URL \`link1.md\` in source file "packages/docusaurus-mdx-loader/src/remark/resolveMarkdownLinks/__tests__/docs/myFile.mdx" (1:1) couldn't be resolved.
Make sure it references a local Markdown file that exists within the current plugin.
To ignore this error, use the \`siteConfig.markdown.hooks.onBrokenMarkdownLinks\` option, or apply the \`pathname://\` protocol to the broken link URLs."
`);
});
});
describe('warns', () => {
it('for unresolvable md and mdx link', async () => {
/* language=markdown */
const content = `
[link1](link1.mdx)
[link2](link2)
[link3](dir/link3.md)
[link 4](/link/4)
`;
const result = await processResolutionErrors(content, 'warn');
expect(result).toMatchInlineSnapshot(`
"[link1](link1.mdx)
[link2](link2)
[link3](dir/link3.md)
[link 4](/link/4)
"
`);
expect(warnMock).toHaveBeenCalledTimes(2);
expect(warnMock.mock.calls).toMatchInlineSnapshot(`
[
[
"[WARNING] Markdown link with URL \`link1.mdx\` in source file "packages/docusaurus-mdx-loader/src/remark/resolveMarkdownLinks/__tests__/docs/myFile.mdx" (2:1) couldn't be resolved.
Make sure it references a local Markdown file that exists within the current plugin.",
],
[
"[WARNING] Markdown link with URL \`dir/link3.md\` in source file "packages/docusaurus-mdx-loader/src/remark/resolveMarkdownLinks/__tests__/docs/myFile.mdx" (6:1) couldn't be resolved.
Make sure it references a local Markdown file that exists within the current plugin.",
],
]
`);
});
it('for unresolvable md and mdx link - with recovery', async () => {
/* language=markdown */
const content = `
[link1](link1.mdx)
[link2](link2)
[link3](dir/link3.md?query#hash)
[link 4](/link/4)
`;
const result = await processResolutionErrors(content, (params) => {
console.warn(`onBrokenMarkdownLinks called with`, params);
// We can alter the AST Node
params.node.title = 'fixed link title';
params.node.url = 'ignored, less important than returned value';
// Or return a new URL
return `/recovered-link`;
});
expect(result).toMatchInlineSnapshot(`
"[link1](/recovered-link "fixed link title")
[link2](link2)
[link3](/recovered-link "fixed link title")
[link 4](/link/4)
"
`);
expect(warnMock).toHaveBeenCalledTimes(2);
expect(warnMock.mock.calls).toMatchInlineSnapshot(`
[
[
"onBrokenMarkdownLinks called with",
{
"node": {
"children": [
{
"position": {
"end": {
"column": 7,
"line": 2,
"offset": 7,
},
"start": {
"column": 2,
"line": 2,
"offset": 2,
},
},
"type": "text",
"value": "link1",
},
],
"position": {
"end": {
"column": 19,
"line": 2,
"offset": 19,
},
"start": {
"column": 1,
"line": 2,
"offset": 1,
},
},
"title": "fixed link title",
"type": "link",
"url": "/recovered-link",
},
"sourceFilePath": "packages/docusaurus-mdx-loader/src/remark/resolveMarkdownLinks/__tests__/docs/myFile.mdx",
"url": "link1.mdx",
},
],
[
"onBrokenMarkdownLinks called with",
{
"node": {
"children": [
{
"position": {
"end": {
"column": 7,
"line": 6,
"offset": 43,
},
"start": {
"column": 2,
"line": 6,
"offset": 38,
},
},
"type": "text",
"value": "link3",
},
],
"position": {
"end": {
"column": 33,
"line": 6,
"offset": 69,
},
"start": {
"column": 1,
"line": 6,
"offset": 37,
},
},
"title": "fixed link title",
"type": "link",
"url": "/recovered-link",
},
"sourceFilePath": "packages/docusaurus-mdx-loader/src/remark/resolveMarkdownLinks/__tests__/docs/myFile.mdx",
"url": "dir/link3.md?query#hash",
},
],
]
`);
});
});
});
});

View File

@ -8,11 +8,18 @@
import {
parseLocalURLPath,
serializeURLPath,
toMessageRelativeFilePath,
type URLPath,
} from '@docusaurus/utils';
import logger from '@docusaurus/logger';
import {formatNodePositionExtraMessage} from '../utils';
import type {Plugin, Transformer} from 'unified';
import type {Definition, Link, Root} from 'mdast';
import type {
MarkdownConfig,
OnBrokenMarkdownLinksFunction,
} from '@docusaurus/types';
type ResolveMarkdownLinkParams = {
/**
@ -32,6 +39,33 @@ export type ResolveMarkdownLink = (
export interface PluginOptions {
resolveMarkdownLink: ResolveMarkdownLink;
onBrokenMarkdownLinks: MarkdownConfig['hooks']['onBrokenMarkdownLinks'];
}
function asFunction(
onBrokenMarkdownLinks: PluginOptions['onBrokenMarkdownLinks'],
): OnBrokenMarkdownLinksFunction {
if (typeof onBrokenMarkdownLinks === 'string') {
const extraHelp =
onBrokenMarkdownLinks === 'throw'
? logger.interpolate`\nTo ignore this error, use the code=${'siteConfig.markdown.hooks.onBrokenMarkdownLinks'} option, or apply the code=${'pathname://'} protocol to the broken link URLs.`
: '';
return ({sourceFilePath, url: linkUrl, node}) => {
const relativePath = toMessageRelativeFilePath(sourceFilePath);
logger.report(
onBrokenMarkdownLinks,
)`Markdown link with URL code=${linkUrl} in source file path=${relativePath}${formatNodePositionExtraMessage(
node,
)} couldn't be resolved.
Make sure it references a local Markdown file that exists within the current plugin.${extraHelp}`;
};
} else {
return (params) =>
onBrokenMarkdownLinks({
...params,
sourceFilePath: toMessageRelativeFilePath(params.sourceFilePath),
});
}
}
const HAS_MARKDOWN_EXTENSION = /\.mdx?$/i;
@ -57,10 +91,15 @@ function parseMarkdownLinkURLPath(link: string): URLPath | null {
* This is exposed as "data.contentTitle" to the processed vfile
* Also gives the ability to strip that content title (used for the blog plugin)
*/
// TODO merge this plugin with "transformLinks"
// in general we'd want to avoid traversing multiple times the same AST
const plugin: Plugin<PluginOptions[], Root> = function plugin(
options,
): Transformer<Root> {
const {resolveMarkdownLink} = options;
const onBrokenMarkdownLinks = asFunction(options.onBrokenMarkdownLinks);
return async (root, file) => {
const {visit} = await import('unist-util-visit');
@ -71,18 +110,26 @@ const plugin: Plugin<PluginOptions[], Root> = function plugin(
return;
}
const sourceFilePath = file.path;
const permalink = resolveMarkdownLink({
sourceFilePath: file.path,
sourceFilePath,
linkPathname: linkURLPath.pathname,
});
if (permalink) {
// This reapplies the link ?qs#hash part to the resolved pathname
const resolvedUrl = serializeURLPath({
link.url = serializeURLPath({
...linkURLPath,
pathname: permalink,
});
link.url = resolvedUrl;
} else {
link.url =
onBrokenMarkdownLinks({
url: link.url,
sourceFilePath,
node: link,
}) ?? link.url;
}
});
};

View File

@ -1 +0,0 @@
![img](/img/doesNotExist.png)

View File

@ -1 +0,0 @@
![img](./notFound.png)

View File

@ -1 +0,0 @@
![invalid image](/invalid.png)

View File

@ -1 +0,0 @@
![img](pathname:///img/unchecked.png)

View File

@ -1,16 +1,10 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`transformImage plugin does not choke on invalid image 1`] = `
"<img alt="invalid image" src={require("!<PROJECT_ROOT>/node_modules/url-loader/dist/cjs.js?limit=10000&name=assets/images/[name]-[contenthash].[ext]&fallback=<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js!./static/invalid.png").default} />
"<img alt="invalid image" src={require("!<PROJECT_ROOT>/node_modules/url-loader/dist/cjs.js?limit=10000&name=assets/images/[name]-[contenthash].[ext]&fallback=<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js!./../static/invalid.png").default} />
"
`;
exports[`transformImage plugin fail if image does not exist 1`] = `"Image packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/static/img/doesNotExist.png or packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/static2/img/doesNotExist.png used in packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/fail.md not found."`;
exports[`transformImage plugin fail if image relative path does not exist 1`] = `"Image packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/notFound.png used in packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/fail2.md not found."`;
exports[`transformImage plugin fail if image url is absent 1`] = `"Markdown image URL is mandatory in "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/noUrl.md" file"`;
exports[`transformImage plugin pathname protocol 1`] = `
"![img](/img/unchecked.png)
"

View File

@ -6,65 +6,361 @@
*/
import {jest} from '@jest/globals';
import path from 'path';
import * as path from 'path';
import vfile from 'to-vfile';
import plugin, {type PluginOptions} from '../index';
const processFixture = async (
name: string,
options: Partial<PluginOptions>,
) => {
const {remark} = await import('remark');
const {default: mdx} = await import('remark-mdx');
const filePath = path.join(__dirname, `__fixtures__/${name}.md`);
const file = await vfile.read(filePath);
const result = await remark()
.use(mdx)
.use(plugin, {siteDir: __dirname, staticDirs: [], ...options})
.process(file);
return result.value;
};
const siteDir = path.join(__dirname, '__fixtures__');
const staticDirs = [
path.join(__dirname, '__fixtures__/static'),
path.join(__dirname, '__fixtures__/static2'),
];
const siteDir = path.join(__dirname, '__fixtures__');
const getProcessor = async (options?: Partial<PluginOptions>) => {
const {remark} = await import('remark');
const {default: mdx} = await import('remark-mdx');
return remark()
.use(mdx)
.use(plugin, {
siteDir,
staticDirs,
onBrokenMarkdownImages: 'throw',
...options,
});
};
const processFixture = async (
name: string,
options?: Partial<PluginOptions>,
) => {
const filePath = path.join(__dirname, `__fixtures__/${name}.md`);
const file = await vfile.read(filePath);
const processor = await getProcessor(options);
const result = await processor.process(file);
return result.value;
};
const processContent = async (
content: string,
options?: Partial<PluginOptions>,
) => {
const processor = await getProcessor(options);
const result = await processor.process({
value: content,
path: path.posix.join(siteDir, 'docs', 'myFile.mdx'),
});
return result.value.toString();
};
describe('transformImage plugin', () => {
it('fail if image does not exist', async () => {
await expect(
processFixture('fail', {staticDirs}),
).rejects.toThrowErrorMatchingSnapshot();
});
it('fail if image relative path does not exist', async () => {
await expect(
processFixture('fail2', {staticDirs}),
).rejects.toThrowErrorMatchingSnapshot();
});
it('fail if image url is absent', async () => {
await expect(
processFixture('noUrl', {staticDirs}),
).rejects.toThrowErrorMatchingSnapshot();
});
it('transform md images to <img />', async () => {
const result = await processFixture('img', {staticDirs, siteDir});
// TODO split that large fixture into many smaller test cases?
const result = await processFixture('img');
expect(result).toMatchSnapshot();
});
it('pathname protocol', async () => {
const result = await processFixture('pathname', {staticDirs});
const result = await processContent(
`![img](pathname:///img/unchecked.png)`,
);
expect(result).toMatchSnapshot();
});
it('does not choke on invalid image', async () => {
const errorMock = jest.spyOn(console, 'warn').mockImplementation(() => {});
const result = await processFixture('invalid-img', {staticDirs});
const result = await processContent(`![invalid image](/invalid.png)`);
expect(result).toMatchSnapshot();
expect(errorMock).toHaveBeenCalledTimes(1);
});
describe('onBrokenMarkdownImages', () => {
const fixtures = {
doesNotExistAbsolute: `![img](/img/doesNotExist.png)`,
doesNotExistRelative: `![img](./doesNotExist.png)`,
doesNotExistSiteAlias: `![img](@site/doesNotExist.png)`,
urlEmpty: `![img]()`,
};
describe('throws', () => {
it('if image absolute path does not exist', async () => {
await expect(processContent(fixtures.doesNotExistAbsolute)).rejects
.toThrowErrorMatchingInlineSnapshot(`
"Markdown image with URL \`/img/doesNotExist.png\` in source file "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/docs/myFile.mdx" (1:1) couldn't be resolved to an existing local image file.
To ignore this error, use the \`siteConfig.markdown.hooks.onBrokenMarkdownImages\` option, or apply the \`pathname://\` protocol to the broken image URLs."
`);
});
it('if image relative path does not exist', async () => {
await expect(processContent(fixtures.doesNotExistRelative)).rejects
.toThrowErrorMatchingInlineSnapshot(`
"Markdown image with URL \`./doesNotExist.png\` in source file "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/docs/myFile.mdx" (1:1) couldn't be resolved to an existing local image file.
To ignore this error, use the \`siteConfig.markdown.hooks.onBrokenMarkdownImages\` option, or apply the \`pathname://\` protocol to the broken image URLs."
`);
});
it('if image @site path does not exist', async () => {
await expect(processContent(fixtures.doesNotExistSiteAlias)).rejects
.toThrowErrorMatchingInlineSnapshot(`
"Markdown image with URL \`@site/doesNotExist.png\` in source file "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/docs/myFile.mdx" (1:1) couldn't be resolved to an existing local image file.
To ignore this error, use the \`siteConfig.markdown.hooks.onBrokenMarkdownImages\` option, or apply the \`pathname://\` protocol to the broken image URLs."
`);
});
it('if image url empty', async () => {
await expect(processContent(fixtures.urlEmpty)).rejects
.toThrowErrorMatchingInlineSnapshot(`
"Markdown image with empty URL found in source file "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/docs/myFile.mdx" (1:1).
To ignore this error, use the \`siteConfig.markdown.hooks.onBrokenMarkdownImages\` option, or apply the \`pathname://\` protocol to the broken image URLs."
`);
});
});
describe('warns', () => {
function processWarn(content: string) {
return processContent(content, {onBrokenMarkdownImages: 'warn'});
}
const warnMock = jest.spyOn(console, 'warn').mockImplementation(() => {});
beforeEach(() => {
warnMock.mockClear();
});
it('if image absolute path does not exist', async () => {
const result = await processWarn(fixtures.doesNotExistAbsolute);
expect(result).toMatchInlineSnapshot(`
"![img](/img/doesNotExist.png)
"
`);
expect(warnMock).toHaveBeenCalledTimes(1);
expect(warnMock.mock.calls).toMatchInlineSnapshot(`
[
[
"[WARNING] Markdown image with URL \`/img/doesNotExist.png\` in source file "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/docs/myFile.mdx" (1:1) couldn't be resolved to an existing local image file.",
],
]
`);
});
it('if image relative path does not exist', async () => {
const result = await processWarn(fixtures.doesNotExistRelative);
expect(result).toMatchInlineSnapshot(`
"![img](./doesNotExist.png)
"
`);
expect(warnMock).toHaveBeenCalledTimes(1);
expect(warnMock.mock.calls).toMatchInlineSnapshot(`
[
[
"[WARNING] Markdown image with URL \`./doesNotExist.png\` in source file "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/docs/myFile.mdx" (1:1) couldn't be resolved to an existing local image file.",
],
]
`);
});
it('if image @site path does not exist', async () => {
const result = await processWarn(fixtures.doesNotExistSiteAlias);
expect(result).toMatchInlineSnapshot(`
"![img](@site/doesNotExist.png)
"
`);
expect(warnMock).toHaveBeenCalledTimes(1);
expect(warnMock.mock.calls).toMatchInlineSnapshot(`
[
[
"[WARNING] Markdown image with URL \`@site/doesNotExist.png\` in source file "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/docs/myFile.mdx" (1:1) couldn't be resolved to an existing local image file.",
],
]
`);
});
it('if image url empty', async () => {
const result = await processWarn(fixtures.urlEmpty);
expect(result).toMatchInlineSnapshot(`
"![img]()
"
`);
expect(warnMock).toHaveBeenCalledTimes(1);
expect(warnMock.mock.calls).toMatchInlineSnapshot(`
[
[
"[WARNING] Markdown image with empty URL found in source file "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/docs/myFile.mdx" (1:1).",
],
]
`);
});
});
describe('function form', () => {
function processWarn(content: string) {
return processContent(content, {
onBrokenMarkdownImages: (params) => {
console.log('onBrokenMarkdownImages called for ', params);
// We can alter the AST Node
params.node.alt = 'new 404 alt';
params.node.url = 'ignored, less important than returned value';
// Or return a new URL
return '/404.png';
},
});
}
const logMock = jest.spyOn(console, 'log').mockImplementation(() => {});
beforeEach(() => {
logMock.mockClear();
});
it('if image absolute path does not exist', async () => {
const result = await processWarn(fixtures.doesNotExistAbsolute);
expect(result).toMatchInlineSnapshot(`
"![new 404 alt](/404.png)
"
`);
expect(logMock).toHaveBeenCalledTimes(1);
expect(logMock.mock.calls).toMatchInlineSnapshot(`
[
[
"onBrokenMarkdownImages called for ",
{
"node": {
"alt": "new 404 alt",
"position": {
"end": {
"column": 30,
"line": 1,
"offset": 29,
},
"start": {
"column": 1,
"line": 1,
"offset": 0,
},
},
"title": null,
"type": "image",
"url": "/404.png",
},
"sourceFilePath": "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/docs/myFile.mdx",
"url": "/img/doesNotExist.png",
},
],
]
`);
});
it('if image relative path does not exist', async () => {
const result = await processWarn(fixtures.doesNotExistRelative);
expect(result).toMatchInlineSnapshot(`
"![new 404 alt](/404.png)
"
`);
expect(logMock).toHaveBeenCalledTimes(1);
expect(logMock.mock.calls).toMatchInlineSnapshot(`
[
[
"onBrokenMarkdownImages called for ",
{
"node": {
"alt": "new 404 alt",
"position": {
"end": {
"column": 27,
"line": 1,
"offset": 26,
},
"start": {
"column": 1,
"line": 1,
"offset": 0,
},
},
"title": null,
"type": "image",
"url": "/404.png",
},
"sourceFilePath": "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/docs/myFile.mdx",
"url": "./doesNotExist.png",
},
],
]
`);
});
it('if image @site path does not exist', async () => {
const result = await processWarn(fixtures.doesNotExistSiteAlias);
expect(result).toMatchInlineSnapshot(`
"![new 404 alt](/404.png)
"
`);
expect(logMock).toHaveBeenCalledTimes(1);
expect(logMock.mock.calls).toMatchInlineSnapshot(`
[
[
"onBrokenMarkdownImages called for ",
{
"node": {
"alt": "new 404 alt",
"position": {
"end": {
"column": 31,
"line": 1,
"offset": 30,
},
"start": {
"column": 1,
"line": 1,
"offset": 0,
},
},
"title": null,
"type": "image",
"url": "/404.png",
},
"sourceFilePath": "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/docs/myFile.mdx",
"url": "@site/doesNotExist.png",
},
],
]
`);
});
it('if image url empty', async () => {
const result = await processWarn(fixtures.urlEmpty);
expect(result).toMatchInlineSnapshot(`
"![new 404 alt](/404.png)
"
`);
expect(logMock).toHaveBeenCalledTimes(1);
expect(logMock.mock.calls).toMatchInlineSnapshot(`
[
[
"onBrokenMarkdownImages called for ",
{
"node": {
"alt": "new 404 alt",
"position": {
"end": {
"column": 9,
"line": 1,
"offset": 8,
},
"start": {
"column": 1,
"line": 1,
"offset": 0,
},
},
"title": null,
"type": "image",
"url": "/404.png",
},
"sourceFilePath": "packages/docusaurus-mdx-loader/src/remark/transformImage/__tests__/__fixtures__/docs/myFile.mdx",
"url": "",
},
],
]
`);
});
});
});
});

View File

@ -19,22 +19,67 @@ import {
import escapeHtml from 'escape-html';
import {imageSizeFromFile} from 'image-size/fromFile';
import logger from '@docusaurus/logger';
import {assetRequireAttributeValue, transformNode} from '../utils';
import {
assetRequireAttributeValue,
formatNodePositionExtraMessage,
transformNode,
} from '../utils';
import type {Plugin, Transformer} from 'unified';
import type {MdxJsxTextElement} from 'mdast-util-mdx';
import type {Image, Root} from 'mdast';
import type {Parent} from 'unist';
import type {
MarkdownConfig,
OnBrokenMarkdownImagesFunction,
} from '@docusaurus/types';
type PluginOptions = {
export type PluginOptions = {
staticDirs: string[];
siteDir: string;
onBrokenMarkdownImages: MarkdownConfig['hooks']['onBrokenMarkdownImages'];
};
type Context = PluginOptions & {
type Context = {
staticDirs: PluginOptions['staticDirs'];
siteDir: PluginOptions['siteDir'];
onBrokenMarkdownImages: OnBrokenMarkdownImagesFunction;
filePath: string;
inlineMarkdownImageFileLoader: string;
};
function asFunction(
onBrokenMarkdownImages: PluginOptions['onBrokenMarkdownImages'],
): OnBrokenMarkdownImagesFunction {
if (typeof onBrokenMarkdownImages === 'string') {
const extraHelp =
onBrokenMarkdownImages === 'throw'
? logger.interpolate`\nTo ignore this error, use the code=${'siteConfig.markdown.hooks.onBrokenMarkdownImages'} option, or apply the code=${'pathname://'} protocol to the broken image URLs.`
: '';
return ({sourceFilePath, url: imageUrl, node}) => {
const relativePath = toMessageRelativeFilePath(sourceFilePath);
if (imageUrl) {
logger.report(
onBrokenMarkdownImages,
)`Markdown image with URL code=${imageUrl} in source file path=${relativePath}${formatNodePositionExtraMessage(
node,
)} couldn't be resolved to an existing local image file.${extraHelp}`;
} else {
logger.report(
onBrokenMarkdownImages,
)`Markdown image with empty URL found in source file path=${relativePath}${formatNodePositionExtraMessage(
node,
)}.${extraHelp}`;
}
};
} else {
return (params) =>
onBrokenMarkdownImages({
...params,
sourceFilePath: toMessageRelativeFilePath(params.sourceFilePath),
});
}
}
type Target = [node: Image, index: number, parent: Parent];
async function toImageRequireNode(
@ -51,7 +96,7 @@ async function toImageRequireNode(
);
relativeImagePath = `./${relativeImagePath}`;
const parsedUrl = parseURLOrPath(node.url, 'https://example.com');
const parsedUrl = parseURLOrPath(node.url);
const hash = parsedUrl.hash ?? '';
const search = parsedUrl.search ?? '';
const requireString = `${context.inlineMarkdownImageFileLoader}${
@ -113,57 +158,53 @@ ${(err as Error).message}`;
});
}
async function ensureImageFileExist(imagePath: string, sourceFilePath: string) {
const imageExists = await fs.pathExists(imagePath);
if (!imageExists) {
throw new Error(
`Image ${toMessageRelativeFilePath(
imagePath,
)} used in ${toMessageRelativeFilePath(sourceFilePath)} not found.`,
);
}
}
async function getImageAbsolutePath(
imagePath: string,
async function getLocalImageAbsolutePath(
originalImagePath: string,
{siteDir, filePath, staticDirs}: Context,
) {
if (imagePath.startsWith('@site/')) {
const imageFilePath = path.join(siteDir, imagePath.replace('@site/', ''));
await ensureImageFileExist(imageFilePath, filePath);
if (originalImagePath.startsWith('@site/')) {
const imageFilePath = path.join(
siteDir,
originalImagePath.replace('@site/', ''),
);
if (!(await fs.pathExists(imageFilePath))) {
return null;
}
return imageFilePath;
} else if (path.isAbsolute(imagePath)) {
} else if (path.isAbsolute(originalImagePath)) {
// Absolute paths are expected to exist in the static folder.
const possiblePaths = staticDirs.map((dir) => path.join(dir, imagePath));
const possiblePaths = staticDirs.map((dir) =>
path.join(dir, originalImagePath),
);
const imageFilePath = await findAsyncSequential(
possiblePaths,
fs.pathExists,
);
if (!imageFilePath) {
throw new Error(
`Image ${possiblePaths
.map((p) => toMessageRelativeFilePath(p))
.join(' or ')} used in ${toMessageRelativeFilePath(
filePath,
)} not found.`,
);
return null;
}
return imageFilePath;
} else {
// relative paths are resolved against the source file's folder
const imageFilePath = path.join(path.dirname(filePath), originalImagePath);
if (!(await fs.pathExists(imageFilePath))) {
return null;
}
return imageFilePath;
}
// relative paths are resolved against the source file's folder
const imageFilePath = path.join(path.dirname(filePath), imagePath);
await ensureImageFileExist(imageFilePath, filePath);
return imageFilePath;
}
async function processImageNode(target: Target, context: Context) {
const [node] = target;
if (!node.url) {
throw new Error(
`Markdown image URL is mandatory in "${toMessageRelativeFilePath(
context.filePath,
)}" file`,
);
node.url =
context.onBrokenMarkdownImages({
url: node.url,
sourceFilePath: context.filePath,
node,
}) ?? node.url;
return;
}
const parsedUrl = url.parse(node.url);
@ -183,13 +224,27 @@ async function processImageNode(target: Target, context: Context) {
  // We try to convert image URLs without a protocol into images with require() calls;
  // going through webpack ensures that image assets exist at build time
const imagePath = await getImageAbsolutePath(decodedPathname, context);
await toImageRequireNode(target, imagePath, context);
const localImagePath = await getLocalImageAbsolutePath(
decodedPathname,
context,
);
if (localImagePath === null) {
node.url =
context.onBrokenMarkdownImages({
url: node.url,
sourceFilePath: context.filePath,
node,
}) ?? node.url;
} else {
await toImageRequireNode(target, localImagePath, context);
}
}
const plugin: Plugin<PluginOptions[], Root> = function plugin(
options,
): Transformer<Root> {
const onBrokenMarkdownImages = asFunction(options.onBrokenMarkdownImages);
return async (root, vfile) => {
const {visit} = await import('unist-util-visit');
@ -201,6 +256,7 @@ const plugin: Plugin<PluginOptions[], Root> = function plugin(
filePath: vfile.path!,
inlineMarkdownImageFileLoader:
fileLoaderUtils.loaders.inlineMarkdownImageFileLoader,
onBrokenMarkdownImages,
};
const promises: Promise<void>[] = [];

View File

@ -1 +0,0 @@
[asset](pathname:///asset/unchecked.pdf)

View File

@ -1,15 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`transformAsset plugin fail if asset url is absent 1`] = `"Markdown link URL is mandatory in "packages/docusaurus-mdx-loader/src/remark/transformLinks/__tests__/__fixtures__/noUrl.md" file (title: asset, line: 1)."`;
exports[`transformAsset plugin fail if asset with site alias does not exist 1`] = `"Asset packages/docusaurus-mdx-loader/src/remark/transformLinks/__tests__/__fixtures__/foo.pdf used in packages/docusaurus-mdx-loader/src/remark/transformLinks/__tests__/__fixtures__/nonexistentSiteAlias.md not found."`;
exports[`transformAsset plugin pathname protocol 1`] = `
"[asset](pathname:///asset/unchecked.pdf)
"
`;
exports[`transformAsset plugin transform md links to <a /> 1`] = `
exports[`transformLinks plugin transform md links to <a /> 1`] = `
"[asset](https://example.com/asset.pdf)
<a target="_blank" data-noBrokenLinkCheck={true} href={require("!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./asset.pdf").default} />
@ -54,6 +45,5 @@ in paragraph <a target="_blank" data-noBrokenLinkCheck={true} href={require("!<P
<a target="_blank" data-noBrokenLinkCheck={true} href={require("./data.raw!=!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./data.json").default}>JSON</a>
<a target="_blank" data-noBrokenLinkCheck={true} href={require("./static/static-json.raw!=!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/static-json.json").default}>static JSON</a>
"
<a target="_blank" data-noBrokenLinkCheck={true} href={require("./static/static-json.raw!=!<PROJECT_ROOT>/node_modules/file-loader/dist/cjs.js?name=assets/files/[name]-[contenthash].[ext]!./static/static-json.json").default}>static JSON</a>"
`;

View File

@ -5,53 +5,270 @@
* LICENSE file in the root directory of this source tree.
*/
import path from 'path';
import {jest} from '@jest/globals';
import * as path from 'path';
import vfile from 'to-vfile';
import plugin from '..';
import transformImage, {type PluginOptions} from '../../transformImage';
import plugin, {type PluginOptions} from '..';
import transformImage from '../../transformImage';
const processFixture = async (name: string, options?: PluginOptions) => {
const siteDir = path.join(__dirname, `__fixtures__`);
const staticDirs = [
path.join(siteDir, 'static'),
path.join(siteDir, 'static2'),
];
const getProcessor = async (options?: Partial<PluginOptions>) => {
const {remark} = await import('remark');
const {default: mdx} = await import('remark-mdx');
const siteDir = path.join(__dirname, `__fixtures__`);
const staticDirs = [
path.join(siteDir, 'static'),
path.join(siteDir, 'static2'),
];
const file = await vfile.read(path.join(siteDir, `${name}.md`));
const result = await remark()
return remark()
.use(mdx)
.use(transformImage, {...options, siteDir, staticDirs})
.use(plugin, {
...options,
.use(transformImage, {
siteDir,
staticDirs,
siteDir: path.join(__dirname, '__fixtures__'),
onBrokenMarkdownImages: 'throw',
})
.process(file);
return result.value;
.use(plugin, {
staticDirs,
siteDir,
onBrokenMarkdownLinks: 'throw',
...options,
});
};
describe('transformAsset plugin', () => {
it('fail if asset url is absent', async () => {
await expect(
processFixture('noUrl'),
).rejects.toThrowErrorMatchingSnapshot();
});
const processFixture = async (
name: string,
options?: Partial<PluginOptions>,
) => {
const processor = await getProcessor(options);
const file = await vfile.read(path.join(siteDir, `${name}.md`));
const result = await processor.process(file);
return result.value.toString().trim();
};
it('fail if asset with site alias does not exist', async () => {
await expect(
processFixture('nonexistentSiteAlias'),
).rejects.toThrowErrorMatchingSnapshot();
const processContent = async (
content: string,
options?: Partial<PluginOptions>,
) => {
const processor = await getProcessor(options);
const result = await processor.process({
value: content,
path: path.posix.join(siteDir, 'docs', 'myFile.mdx'),
});
return result.value.toString().trim();
};
describe('transformLinks plugin', () => {
it('transform md links to <a />', async () => {
    // TODO split fixture into many smaller test cases
const result = await processFixture('asset');
expect(result).toMatchSnapshot();
});
it('pathname protocol', async () => {
const result = await processFixture('pathname');
expect(result).toMatchSnapshot();
const result = await processContent(`pathname:///unchecked.pdf)`);
expect(result).toMatchInlineSnapshot(`"pathname:///unchecked.pdf)"`);
});
it('accepts absolute file that does not exist', async () => {
const result = await processContent(`[file](/dir/file.zip)`);
expect(result).toMatchInlineSnapshot(`"[file](/dir/file.zip)"`);
});
it('accepts relative file that does not exist', async () => {
const result = await processContent(`[file](dir/file.zip)`);
expect(result).toMatchInlineSnapshot(`"[file](dir/file.zip)"`);
});
describe('onBrokenMarkdownLinks', () => {
const fixtures = {
urlEmpty: `[empty]()`,
fileDoesNotExistSiteAlias: `[file](@site/file.zip)`,
};
describe('throws', () => {
it('if url is empty', async () => {
await expect(processContent(fixtures.urlEmpty)).rejects
.toThrowErrorMatchingInlineSnapshot(`
"Markdown link with empty URL found in source file "packages/docusaurus-mdx-loader/src/remark/transformLinks/__tests__/__fixtures__/docs/myFile.mdx" (1:1).
To ignore this error, use the \`siteConfig.markdown.hooks.onBrokenMarkdownLinks\` option, or apply the \`pathname://\` protocol to the broken link URLs."
`);
});
it('if file with site alias does not exist', async () => {
await expect(processContent(fixtures.fileDoesNotExistSiteAlias)).rejects
.toThrowErrorMatchingInlineSnapshot(`
"Markdown link with URL \`@site/file.zip\` in source file "packages/docusaurus-mdx-loader/src/remark/transformLinks/__tests__/__fixtures__/docs/myFile.mdx" (1:1) couldn't be resolved.
Make sure it references a local Markdown file that exists within the current plugin.
To ignore this error, use the \`siteConfig.markdown.hooks.onBrokenMarkdownLinks\` option, or apply the \`pathname://\` protocol to the broken link URLs."
`);
});
});
describe('warns', () => {
function processWarn(content: string) {
return processContent(content, {onBrokenMarkdownLinks: 'warn'});
}
const warnMock = jest.spyOn(console, 'warn').mockImplementation(() => {});
beforeEach(() => {
warnMock.mockClear();
});
it('if url is empty', async () => {
const result = await processWarn(fixtures.urlEmpty);
expect(result).toMatchInlineSnapshot(`"[empty]()"`);
expect(warnMock).toHaveBeenCalledTimes(1);
expect(warnMock.mock.calls).toMatchInlineSnapshot(`
[
[
"[WARNING] Markdown link with empty URL found in source file "packages/docusaurus-mdx-loader/src/remark/transformLinks/__tests__/__fixtures__/docs/myFile.mdx" (1:1).",
],
]
`);
});
it('if file with site alias does not exist', async () => {
const result = await processWarn(fixtures.fileDoesNotExistSiteAlias);
expect(result).toMatchInlineSnapshot(`"[file](@site/file.zip)"`);
expect(warnMock).toHaveBeenCalledTimes(1);
expect(warnMock.mock.calls).toMatchInlineSnapshot(`
[
[
"[WARNING] Markdown link with URL \`@site/file.zip\` in source file "packages/docusaurus-mdx-loader/src/remark/transformLinks/__tests__/__fixtures__/docs/myFile.mdx" (1:1) couldn't be resolved.
Make sure it references a local Markdown file that exists within the current plugin.",
],
]
`);
});
});
describe('function form', () => {
function processWarn(content: string) {
return processContent(content, {
onBrokenMarkdownLinks: (params) => {
console.log('onBrokenMarkdownLinks called with', params);
// We can alter the AST Node
params.node.title = 'fixed link title';
params.node.url = 'ignored, less important than returned value';
// Or return a new URL
return '/404';
},
});
}
const logMock = jest.spyOn(console, 'log').mockImplementation(() => {});
beforeEach(() => {
logMock.mockClear();
});
it('if url is empty', async () => {
const result = await processWarn(fixtures.urlEmpty);
expect(result).toMatchInlineSnapshot(
`"[empty](/404 "fixed link title")"`,
);
expect(logMock).toHaveBeenCalledTimes(1);
expect(logMock.mock.calls).toMatchInlineSnapshot(`
[
[
"onBrokenMarkdownLinks called with",
{
"node": {
"children": [
{
"position": {
"end": {
"column": 7,
"line": 1,
"offset": 6,
},
"start": {
"column": 2,
"line": 1,
"offset": 1,
},
},
"type": "text",
"value": "empty",
},
],
"position": {
"end": {
"column": 10,
"line": 1,
"offset": 9,
},
"start": {
"column": 1,
"line": 1,
"offset": 0,
},
},
"title": "fixed link title",
"type": "link",
"url": "/404",
},
"sourceFilePath": "packages/docusaurus-mdx-loader/src/remark/transformLinks/__tests__/__fixtures__/docs/myFile.mdx",
"url": "",
},
],
]
`);
});
it('if file with site alias does not exist', async () => {
const result = await processWarn(fixtures.fileDoesNotExistSiteAlias);
expect(result).toMatchInlineSnapshot(
`"[file](/404 "fixed link title")"`,
);
expect(logMock).toHaveBeenCalledTimes(1);
expect(logMock.mock.calls).toMatchInlineSnapshot(`
[
[
"onBrokenMarkdownLinks called with",
{
"node": {
"children": [
{
"position": {
"end": {
"column": 6,
"line": 1,
"offset": 5,
},
"start": {
"column": 2,
"line": 1,
"offset": 1,
},
},
"type": "text",
"value": "file",
},
],
"position": {
"end": {
"column": 23,
"line": 1,
"offset": 22,
},
"start": {
"column": 1,
"line": 1,
"offset": 0,
},
},
"title": "fixed link title",
"type": "link",
"url": "/404",
},
"sourceFilePath": "packages/docusaurus-mdx-loader/src/remark/transformLinks/__tests__/__fixtures__/docs/myFile.mdx",
"url": "@site/file.zip",
},
],
]
`);
});
});
});
});

View File

@ -17,24 +17,72 @@ import {
parseURLOrPath,
} from '@docusaurus/utils';
import escapeHtml from 'escape-html';
import {assetRequireAttributeValue, transformNode} from '../utils';
import logger from '@docusaurus/logger';
import {
assetRequireAttributeValue,
formatNodePositionExtraMessage,
transformNode,
} from '../utils';
import type {Plugin, Transformer} from 'unified';
import type {MdxJsxTextElement} from 'mdast-util-mdx';
import type {Parent} from 'unist';
import type {Link, Literal, Root} from 'mdast';
import type {Link, Root} from 'mdast';
import type {
MarkdownConfig,
OnBrokenMarkdownLinksFunction,
} from '@docusaurus/types';
type PluginOptions = {
export type PluginOptions = {
staticDirs: string[];
siteDir: string;
onBrokenMarkdownLinks: MarkdownConfig['hooks']['onBrokenMarkdownLinks'];
};
type Context = PluginOptions & {
staticDirs: string[];
siteDir: string;
onBrokenMarkdownLinks: OnBrokenMarkdownLinksFunction;
filePath: string;
inlineMarkdownLinkFileLoader: string;
};
type Target = [node: Link, index: number, parent: Parent];
function asFunction(
onBrokenMarkdownLinks: PluginOptions['onBrokenMarkdownLinks'],
): OnBrokenMarkdownLinksFunction {
if (typeof onBrokenMarkdownLinks === 'string') {
const extraHelp =
onBrokenMarkdownLinks === 'throw'
? logger.interpolate`\nTo ignore this error, use the code=${'siteConfig.markdown.hooks.onBrokenMarkdownLinks'} option, or apply the code=${'pathname://'} protocol to the broken link URLs.`
: '';
return ({sourceFilePath, url: linkUrl, node}) => {
const relativePath = toMessageRelativeFilePath(sourceFilePath);
if (linkUrl) {
logger.report(
onBrokenMarkdownLinks,
)`Markdown link with URL code=${linkUrl} in source file path=${relativePath}${formatNodePositionExtraMessage(
node,
)} couldn't be resolved.
Make sure it references a local Markdown file that exists within the current plugin.${extraHelp}`;
} else {
logger.report(
onBrokenMarkdownLinks,
)`Markdown link with empty URL found in source file path=${relativePath}${formatNodePositionExtraMessage(
node,
)}.${extraHelp}`;
}
};
} else {
return (params) =>
onBrokenMarkdownLinks({
...params,
sourceFilePath: toMessageRelativeFilePath(params.sourceFilePath),
});
}
}
/**
* Transforms the link node to a JSX `<a>` element with a `require()` call.
*/
@ -123,27 +171,15 @@ async function toAssetRequireNode(
});
}
async function ensureAssetFileExist(assetPath: string, sourceFilePath: string) {
const assetExists = await fs.pathExists(assetPath);
if (!assetExists) {
throw new Error(
`Asset ${toMessageRelativeFilePath(
assetPath,
)} used in ${toMessageRelativeFilePath(sourceFilePath)} not found.`,
);
}
}
async function getAssetAbsolutePath(
async function getLocalFileAbsolutePath(
assetPath: string,
{siteDir, filePath, staticDirs}: Context,
) {
if (assetPath.startsWith('@site/')) {
const assetFilePath = path.join(siteDir, assetPath.replace('@site/', ''));
    // The @site alias is the only way to be sure that the user wants an asset.
    // Everything else can just be a link URL
await ensureAssetFileExist(assetFilePath, filePath);
return assetFilePath;
if (await fs.pathExists(assetFilePath)) {
return assetFilePath;
}
} else if (path.isAbsolute(assetPath)) {
const assetFilePath = await findAsyncSequential(
staticDirs.map((dir) => path.join(dir, assetPath)),
@ -164,16 +200,13 @@ async function getAssetAbsolutePath(
async function processLinkNode(target: Target, context: Context) {
const [node] = target;
if (!node.url) {
// Try to improve error feedback
// see https://github.com/facebook/docusaurus/issues/3309#issuecomment-690371675
const title =
node.title ?? (node.children[0] as Literal | undefined)?.value ?? '?';
const line = node.position?.start.line ?? '?';
throw new Error(
`Markdown link URL is mandatory in "${toMessageRelativeFilePath(
context.filePath,
)}" file (title: ${title}, line: ${line}).`,
);
node.url =
context.onBrokenMarkdownLinks({
url: node.url,
sourceFilePath: context.filePath,
node,
}) ?? node.url;
return;
}
const parsedUrl = url.parse(node.url);
@ -189,29 +222,48 @@ async function processLinkNode(target: Target, context: Context) {
return;
}
const assetPath = await getAssetAbsolutePath(
const localFilePath = await getLocalFileAbsolutePath(
decodeURIComponent(parsedUrl.pathname),
context,
);
if (assetPath) {
await toAssetRequireNode(target, assetPath, context);
if (localFilePath) {
await toAssetRequireNode(target, localFilePath, context);
} else {
      // The @site alias is the only way to be sure that the user wants an asset.
if (hasSiteAlias) {
node.url =
context.onBrokenMarkdownLinks({
url: node.url,
sourceFilePath: context.filePath,
node,
}) ?? node.url;
} else {
      // Even if the URL has a dot and looks like a file extension,
      // it's risky to throw and fail fast by default:
      // a route path segment can legitimately look like a filename
}
}
}
const plugin: Plugin<PluginOptions[], Root> = function plugin(
options,
): Transformer<Root> {
const onBrokenMarkdownLinks = asFunction(options.onBrokenMarkdownLinks);
return async (root, vfile) => {
const {visit} = await import('unist-util-visit');
const fileLoaderUtils = getFileLoaderUtils(
vfile.data.compilerName === 'server',
);
const context: Context = {
...options,
filePath: vfile.path!,
inlineMarkdownLinkFileLoader:
fileLoaderUtils.loaders.inlineMarkdownLinkFileLoader,
onBrokenMarkdownLinks,
};
const promises: Promise<void>[] = [];

View File

@ -8,7 +8,7 @@ import path from 'path';
import process from 'process';
import logger from '@docusaurus/logger';
import {posixPath} from '@docusaurus/utils';
import {transformNode} from '../utils';
import {formatNodePositionExtraMessage, transformNode} from '../utils';
import type {Root} from 'mdast';
import type {Parent} from 'unist';
import type {Transformer, Processor, Plugin} from 'unified';
@ -39,17 +39,9 @@ function formatDirectiveName(directive: Directives) {
return `${prefix}${directive.name}`;
}
function formatDirectivePosition(directive: Directives): string | undefined {
return directive.position?.start
? logger.interpolate`number=${directive.position.start.line}:number=${directive.position.start.column}`
: undefined;
}
function formatUnusedDirectiveMessage(directive: Directives) {
const name = formatDirectiveName(directive);
const position = formatDirectivePosition(directive);
return `- ${name} ${position ? `(${position})` : ''}`;
return `- ${name}${formatNodePositionExtraMessage(directive)}`;
}
function formatUnusedDirectivesMessage({

View File

@ -5,6 +5,7 @@
* LICENSE file in the root directory of this source tree.
*/
import logger from '@docusaurus/logger';
import type {Node} from 'unist';
import type {MdxJsxAttributeValueExpression} from 'mdast-util-mdx';
@ -83,3 +84,16 @@ export function assetRequireAttributeValue(
},
};
}
function formatNodePosition(node: Node): string | undefined {
return node.position?.start
? logger.interpolate`number=${node.position.start.line}:number=${node.position.start.column}`
: undefined;
}
// Returns " (line:column)" when position info is available
// The leading space makes it easy to append to any existing message
export function formatNodePositionExtraMessage(node: Node): string {
const position = formatNodePosition(node);
return `${position ? ` (${position})` : ''}`;
}

View File

@ -1,6 +1,6 @@
{
"name": "@docusaurus/module-type-aliases",
"version": "3.8.0",
"version": "3.9.2",
"description": "Docusaurus module type aliases.",
"types": "./src/index.d.ts",
"publishConfig": {
@ -12,7 +12,7 @@
"directory": "packages/docusaurus-module-type-aliases"
},
"dependencies": {
"@docusaurus/types": "3.8.0",
"@docusaurus/types": "3.9.2",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",

View File

@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-client-redirects",
"version": "3.8.0",
"version": "3.9.2",
"description": "Client redirects plugin for Docusaurus.",
"main": "lib/index.js",
"types": "lib/index.d.ts",
@ -18,24 +18,24 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.8.0",
"@docusaurus/logger": "3.8.0",
"@docusaurus/utils": "3.8.0",
"@docusaurus/utils-common": "3.8.0",
"@docusaurus/utils-validation": "3.8.0",
"@docusaurus/core": "3.9.2",
"@docusaurus/logger": "3.9.2",
"@docusaurus/utils": "3.9.2",
"@docusaurus/utils-common": "3.9.2",
"@docusaurus/utils-validation": "3.9.2",
"eta": "^2.2.0",
"fs-extra": "^11.1.1",
"lodash": "^4.17.21",
"tslib": "^2.6.0"
},
"devDependencies": {
"@docusaurus/types": "3.8.0"
"@docusaurus/types": "3.9.2"
},
"peerDependencies": {
"react": "^18.0.0 || ^19.0.0",
"react-dom": "^18.0.0 || ^19.0.0"
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
}
}

View File

@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-content-blog",
"version": "3.8.0",
"version": "3.9.2",
"description": "Blog plugin for Docusaurus.",
"main": "lib/index.js",
"types": "src/plugin-content-blog.d.ts",
@ -31,14 +31,14 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.8.0",
"@docusaurus/logger": "3.8.0",
"@docusaurus/mdx-loader": "3.8.0",
"@docusaurus/theme-common": "3.8.0",
"@docusaurus/types": "3.8.0",
"@docusaurus/utils": "3.8.0",
"@docusaurus/utils-common": "3.8.0",
"@docusaurus/utils-validation": "3.8.0",
"@docusaurus/core": "3.9.2",
"@docusaurus/logger": "3.9.2",
"@docusaurus/mdx-loader": "3.9.2",
"@docusaurus/theme-common": "3.9.2",
"@docusaurus/types": "3.9.2",
"@docusaurus/utils": "3.9.2",
"@docusaurus/utils-common": "3.9.2",
"@docusaurus/utils-validation": "3.9.2",
"cheerio": "1.0.0-rc.12",
"feed": "^4.2.2",
"fs-extra": "^11.1.1",
@ -56,7 +56,7 @@
"react-dom": "^18.0.0 || ^19.0.0"
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
},
"devDependencies": {
"@total-typescript/shoehorn": "^0.1.2",

View File

@ -21,11 +21,13 @@ describe('normalizeSocials', () => {
twitch: 'gingergeek',
youtube: 'gingergeekuk',
mastodon: 'Mastodon',
email: 'seb@example.com',
};
expect(normalizeSocials(socials)).toMatchInlineSnapshot(`
{
"bluesky": "https://bsky.app/profile/gingergeek.co.uk",
"email": "mailto:seb@example.com",
"github": "https://github.com/ozakione",
"instagram": "https://www.instagram.com/thisweekinreact",
"linkedin": "https://www.linkedin.com/in/ozakione/",
@ -48,11 +50,13 @@ describe('normalizeSocials', () => {
instaGRam: 'thisweekinreact',
BLUESKY: 'gingergeek.co.uk',
tHrEaDs: 'gingergeekuk',
eMAil: 'seb@example.com',
};
expect(normalizeSocials(socials)).toMatchInlineSnapshot(`
{
"bluesky": "https://bsky.app/profile/gingergeek.co.uk",
"email": "mailto:seb@example.com",
"github": "https://github.com/ozakione",
"instagram": "https://www.instagram.com/thisweekinreact",
"linkedin": "https://www.linkedin.com/in/ozakione/",
@ -69,6 +73,7 @@ describe('normalizeSocials', () => {
linkedin: 'https://linkedin.com/ozakione',
github: 'https://github.com/ozakione',
stackoverflow: 'https://stackoverflow.com/ozakione',
email: 'mailto:seb@example.com',
};
expect(normalizeSocials(socials)).toEqual(socials);
@ -81,10 +86,12 @@ describe('normalizeSocials', () => {
github: 'https://github.com/ozakione',
stackoverflow: 'https://stackoverflow.com/ozakione',
mastodon: 'https://hachyderm.io/@hachyderm',
email: 'mailto:seb@example.com',
};
expect(normalizeSocials(socials)).toMatchInlineSnapshot(`
{
"email": "mailto:seb@example.com",
"github": "https://github.com/ozakione",
"linkedin": "https://www.linkedin.com/in/ozakione/",
"mastodon": "https://hachyderm.io/@hachyderm",

View File

@ -6,12 +6,13 @@
*/
import {jest} from '@jest/globals';
import path from 'path';
import * as path from 'path';
import {normalizePluginOptions} from '@docusaurus/utils-validation';
import {
posixPath,
getFileCommitDate,
LAST_UPDATE_FALLBACK,
getLocaleConfig,
} from '@docusaurus/utils';
import {DEFAULT_FUTURE_CONFIG} from '@docusaurus/core/src/server/configValidation';
import pluginContentBlog from '../index';
@ -22,6 +23,7 @@ import type {
I18n,
Validate,
MarkdownConfig,
I18nLocaleConfig,
} from '@docusaurus/types';
import type {
BlogPost,
@ -67,7 +69,10 @@ Available blog post titles are:\n- ${blogPosts
return post;
}
function getI18n(locale: string): I18n {
function getI18n(
locale: string,
localeConfigOptions?: Partial<I18nLocaleConfig>,
): I18n {
return {
currentLocale: locale,
locales: [locale],
@ -80,6 +85,8 @@ function getI18n(locale: string): I18n {
htmlLang: locale,
direction: 'ltr',
path: locale,
translate: true,
...localeConfigOptions,
},
},
};
@ -94,13 +101,14 @@ const BaseEditUrl = 'https://baseEditUrl.com/edit';
const getPlugin = async (
siteDir: string,
pluginOptions: Partial<PluginOptions> = {},
i18n: I18n = DefaultI18N,
i18nOptions: Partial<I18n> = {},
) => {
const i18n = {...DefaultI18N, ...i18nOptions};
const generatedFilesDir: string = path.resolve(siteDir, '.docusaurus');
const localizationDir = path.join(
siteDir,
i18n.path,
i18n.localeConfigs[i18n.currentLocale]!.path,
getLocaleConfig(i18n).path,
);
const siteConfig = {
title: 'Hello',
@ -153,20 +161,34 @@ const getBlogTags = async (
};
describe('blog plugin', () => {
it('getPathsToWatch returns right files', async () => {
const siteDir = path.join(__dirname, '__fixtures__', 'website');
const plugin = await getPlugin(siteDir);
const pathsToWatch = plugin.getPathsToWatch!();
const relativePathsToWatch = pathsToWatch.map((p) =>
posixPath(path.relative(siteDir, p)),
);
expect(relativePathsToWatch).toEqual([
'i18n/en/docusaurus-plugin-content-blog/authors.yml',
'i18n/en/docusaurus-plugin-content-blog/tags.yml',
'blog/tags.yml',
'i18n/en/docusaurus-plugin-content-blog/**/*.{md,mdx}',
'blog/**/*.{md,mdx}',
]);
describe('getPathsToWatch', () => {
async function runTest({translate}: {translate: boolean}) {
const siteDir = path.join(__dirname, '__fixtures__', 'website');
const plugin = await getPlugin(siteDir, {}, getI18n('en', {translate}));
const pathsToWatch = plugin.getPathsToWatch!();
return pathsToWatch.map((p) => posixPath(path.relative(siteDir, p)));
}
it('getPathsToWatch returns right files', async () => {
const relativePathsToWatch = await runTest({translate: true});
expect(relativePathsToWatch).toEqual([
'i18n/en/docusaurus-plugin-content-blog/authors.yml',
'i18n/en/docusaurus-plugin-content-blog/tags.yml',
        // 'blog/authors.yml', // TODO weird that authors.yml is not listed here while tags.yml is
'blog/tags.yml',
'i18n/en/docusaurus-plugin-content-blog/**/*.{md,mdx}',
'blog/**/*.{md,mdx}',
]);
});
it('getPathsToWatch returns right files (translate: false)', async () => {
const relativePathsToWatch = await runTest({translate: false});
expect(relativePathsToWatch).toEqual([
'blog/authors.yml',
'blog/tags.yml',
'blog/**/*.{md,mdx}',
]);
});
});
it('builds a simple website', async () => {
@ -377,6 +399,54 @@ describe('blog plugin', () => {
});
});
describe('i18n config translate is wired properly', () => {
async function runTest({translate}: {translate: boolean}) {
const siteDir = path.join(__dirname, '__fixtures__', 'website');
const blogPosts = await getBlogPosts(
siteDir,
{},
getI18n('en', {translate}),
);
// Simpler to snapshot
return blogPosts.map((post) => post.metadata.title);
}
it('works with translate: false', async () => {
await expect(runTest({translate: false})).resolves.toMatchInlineSnapshot(`
[
"test links",
"MDX Blog Sample with require calls",
"Full Blog Sample",
"Complex Slug",
"Simple Slug",
"draft",
"unlisted",
"some heading",
"date-matter",
"Happy 1st Birthday Slash!",
]
`);
});
it('works with translate: true', async () => {
await expect(runTest({translate: true})).resolves.toMatchInlineSnapshot(`
[
"test links",
"MDX Blog Sample with require calls",
"Full Blog Sample",
"Complex Slug",
"Simple Slug",
"draft",
"unlisted",
"some heading",
"date-matter",
"Happy 1st Birthday Slash! (translated)",
]
`);
});
});
it('handles edit URL with editLocalizedBlogs: true', async () => {
const siteDir = path.join(__dirname, '__fixtures__', 'website');
const blogPosts = await getBlogPosts(siteDir, {editLocalizedFiles: true});
@ -390,6 +460,23 @@ describe('blog plugin', () => {
);
});
it('handles edit URL with editLocalizedBlogs: true and translate: false', async () => {
const siteDir = path.join(__dirname, '__fixtures__', 'website');
const blogPosts = await getBlogPosts(
siteDir,
{editLocalizedFiles: true},
getI18n('en', {translate: false}),
);
const localizedBlogPost = blogPosts.find(
(v) => v.metadata.title === 'Happy 1st Birthday Slash!',
)!;
expect(localizedBlogPost.metadata.editUrl).toBe(
`${BaseEditUrl}/blog/2018-12-14-Happy-First-Birthday-Slash.md`,
);
});
it('handles edit URL with editUrl function', async () => {
const siteDir = path.join(__dirname, '__fixtures__', 'website');

View File

@ -27,6 +27,7 @@ export const AuthorSocialsSchema = Joi.object<AuthorSocials>({
mastodon: Joi.string(),
twitch: Joi.string(),
youtube: Joi.string(),
email: Joi.string(),
}).unknown();
type PredefinedPlatformNormalizer = (value: string) => string;
@ -47,12 +48,12 @@ const PredefinedPlatformNormalizers: Record<
mastodon: (handle: string) => `https://mastodon.social/@${handle}`, // can be in format user@other.server and it will redirect if needed
twitch: (handle: string) => `https://twitch.tv/${handle}`,
youtube: (handle: string) => `https://youtube.com/@${handle}`, // https://support.google.com/youtube/answer/6180214?hl=en
email: (email: string) => `mailto:${email}`,
};
type SocialEntry = [string, string];
function normalizeSocialEntry([platform, value]: SocialEntry): SocialEntry {
const normalizer = PredefinedPlatformNormalizers[platform.toLowerCase()];
if (typeof value !== 'string') {
throw new Error(
`Author socials should be usernames/userIds/handles, or fully qualified HTTP(s) absolute URLs.
@ -60,7 +61,9 @@ Social platform '${platform}' has illegal value '${value}'`,
);
}
const isAbsoluteUrl =
value.startsWith('http://') || value.startsWith('https://');
value.startsWith('http://') ||
value.startsWith('https://') ||
value.startsWith('mailto:');
if (isAbsoluteUrl) {
return [platform, value];
} else if (value.includes('/')) {
@ -69,6 +72,7 @@ Social platform '${platform}' has illegal value '${value}'`,
Social platform '${platform}' has illegal value '${value}'`,
);
}
const normalizer = PredefinedPlatformNormalizers[platform.toLowerCase()];
if (normalizer && !isAbsoluteUrl) {
const normalizedPlatform = platform.toLowerCase();
const normalizedValue = normalizer(value);

View File

@ -323,7 +323,9 @@ async function processBlogSourceFile(
} else if (typeof editUrl === 'string') {
const isLocalized = blogDirPath === contentPaths.contentPathLocalized;
const fileContentPath =
isLocalized && options.editLocalizedFiles
isLocalized &&
options.editLocalizedFiles &&
contentPaths.contentPathLocalized
? contentPaths.contentPathLocalized
: contentPaths.contentPath;

View File

@ -19,6 +19,7 @@ import {
getDataFilePath,
DEFAULT_PLUGIN_ID,
resolveMarkdownLinkPathname,
getLocaleConfig,
} from '@docusaurus/utils';
import {getTagsFilePathsToWatch} from '@docusaurus/utils-validation';
import {createMDXLoaderItem} from '@docusaurus/mdx-loader';
@ -71,15 +72,18 @@ export default async function pluginContentBlog(
);
}
const {onBrokenMarkdownLinks, baseUrl} = siteConfig;
const {baseUrl} = siteConfig;
const shouldTranslate = getLocaleConfig(context.i18n).translate;
const contentPaths: BlogContentPaths = {
contentPath: path.resolve(siteDir, options.path),
contentPathLocalized: getPluginI18nPath({
localizationDir,
pluginName: PluginName,
pluginId: options.id,
}),
contentPathLocalized: shouldTranslate
? getPluginI18nPath({
localizationDir,
pluginName: PluginName,
pluginId: options.id,
})
: undefined,
};
const pluginId = options.id ?? DEFAULT_PLUGIN_ID;
@ -154,18 +158,12 @@ export default async function pluginContentBlog(
},
markdownConfig: siteConfig.markdown,
resolveMarkdownLink: ({linkPathname, sourceFilePath}) => {
const permalink = resolveMarkdownLinkPathname(linkPathname, {
return resolveMarkdownLinkPathname(linkPathname, {
sourceFilePath,
sourceToPermalink: contentHelpers.sourceToPermalink,
siteDir,
contentPaths,
});
if (permalink === null) {
logger.report(
onBrokenMarkdownLinks,
)`Blog markdown link couldn't be resolved: (url=${linkPathname}) in source file path=${sourceFilePath}`;
}
return permalink;
},
});

View File

@ -1,6 +1,6 @@
{
"name": "@docusaurus/plugin-content-docs",
"version": "3.8.0",
"version": "3.9.2",
"description": "Docs plugin for Docusaurus.",
"main": "lib/index.js",
"sideEffects": false,
@ -35,15 +35,15 @@
},
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.8.0",
"@docusaurus/logger": "3.8.0",
"@docusaurus/mdx-loader": "3.8.0",
"@docusaurus/module-type-aliases": "3.8.0",
"@docusaurus/theme-common": "3.8.0",
"@docusaurus/types": "3.8.0",
"@docusaurus/utils": "3.8.0",
"@docusaurus/utils-common": "3.8.0",
"@docusaurus/utils-validation": "3.8.0",
"@docusaurus/core": "3.9.2",
"@docusaurus/logger": "3.9.2",
"@docusaurus/mdx-loader": "3.9.2",
"@docusaurus/module-type-aliases": "3.9.2",
"@docusaurus/theme-common": "3.9.2",
"@docusaurus/types": "3.9.2",
"@docusaurus/utils": "3.9.2",
"@docusaurus/utils-common": "3.9.2",
"@docusaurus/utils-validation": "3.9.2",
"@types/react-router-config": "^5.0.7",
"combine-promises": "^1.1.0",
"fs-extra": "^11.1.1",
@ -65,6 +65,6 @@
"react-dom": "^18.0.0 || ^19.0.0"
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
}
}

View File

@ -2,6 +2,8 @@
id: hello-2
title: Hello 2
sidebar_label: Hello 2 From Doc
sidebar_class_name: front-matter-class-name
sidebar_custom_props: {custom: "from front matter"}
---
Hello World 2!

View File

@ -8,7 +8,9 @@
{
"id": "hello-2",
"type": "doc",
"label": "Hello Two"
"label": "Hello Two",
"className": "class-name-from-sidebars.json",
"customProps": {"test": "from sidebars.json"}
}
]
}

View File

@ -8,6 +8,10 @@ exports[`sidebar site with undefined sidebar 1`] = `
"type": "doc",
},
{
"className": "front-matter-class-name",
"customProps": {
"custom": "from front matter",
},
"id": "hello-2",
"label": "Hello 2 From Doc",
"type": "doc",
@ -2001,17 +2005,6 @@ exports[`simple website content: route config 1`] = `
]
`;
exports[`simple website getPathToWatch 1`] = `
[
"sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/current/**/*.{md,mdx}",
"docs/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs/current/tags.yml",
"docs/tags.yml",
"docs/**/_category_.{json,yml,yaml}",
]
`;
exports[`site with custom sidebar items generator sidebar is autogenerated according to a custom sidebarItemsGenerator 1`] = `
{
"defaultSidebar": [
@ -3323,23 +3316,6 @@ exports[`versioned website (community) content: route config 1`] = `
]
`;
exports[`versioned website (community) getPathToWatch 1`] = `
[
"community_sidebars.json",
"i18n/en/docusaurus-plugin-content-docs-community/current/**/*.{md,mdx}",
"community/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs-community/current/tags.yml",
"community/tags.yml",
"community/**/_category_.{json,yml,yaml}",
"community_versioned_sidebars/version-1.0.0-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs-community/version-1.0.0/**/*.{md,mdx}",
"community_versioned_docs/version-1.0.0/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs-community/version-1.0.0/tags.yml",
"community_versioned_docs/version-1.0.0/tags.yml",
"community_versioned_docs/version-1.0.0/**/_category_.{json,yml,yaml}",
]
`;
exports[`versioned website content 1`] = `
{
"description": "This is next version of bar.",
@ -5205,32 +5181,3 @@ exports[`versioned website content: withSlugs version sidebars 1`] = `
],
}
`;
exports[`versioned website getPathToWatch 1`] = `
[
"sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/current/**/*.{md,mdx}",
"docs/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs/current/tags.yml",
"docs/tags.yml",
"docs/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-1.0.1-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/version-1.0.1/**/*.{md,mdx}",
"versioned_docs/version-1.0.1/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs/version-1.0.1/tags.yml",
"versioned_docs/version-1.0.1/tags.yml",
"versioned_docs/version-1.0.1/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-1.0.0-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/version-1.0.0/**/*.{md,mdx}",
"versioned_docs/version-1.0.0/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs/version-1.0.0/tags.yml",
"versioned_docs/version-1.0.0/tags.yml",
"versioned_docs/version-1.0.0/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-withSlugs-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/version-withSlugs/**/*.{md,mdx}",
"versioned_docs/version-withSlugs/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs/version-withSlugs/tags.yml",
"versioned_docs/version-withSlugs/tags.yml",
"versioned_docs/version-withSlugs/**/_category_.{json,yml,yaml}",
]
`;

View File

@ -5,27 +5,27 @@ exports[`getLoadedContentTranslationFiles returns translation files 1`] = `
{
"content": {
"sidebar.docs.category.Getting started": {
"description": "The label for category Getting started in sidebar docs",
"description": "The label for category 'Getting started' in sidebar 'docs'",
"message": "Getting started",
},
"sidebar.docs.category.Getting started.link.generated-index.description": {
"description": "The generated-index page description for category Getting started in sidebar docs",
"description": "The generated-index page description for category 'Getting started' in sidebar 'docs'",
"message": "Getting started index description",
},
"sidebar.docs.category.Getting started.link.generated-index.title": {
"description": "The generated-index page title for category Getting started in sidebar docs",
"description": "The generated-index page title for category 'Getting started' in sidebar 'docs'",
"message": "Getting started index title",
},
"sidebar.docs.doc.Second doc translatable": {
"description": "The label for the doc item Second doc translatable in sidebar docs, linking to the doc doc2",
"description": "The label for the doc item 'Second doc translatable' in sidebar 'docs', linking to the doc doc2",
"message": "Second doc translatable",
},
"sidebar.docs.link.Link label": {
"description": "The label for link Link label in sidebar docs, linking to https://facebook.com",
"description": "The label for link 'Link label' in sidebar 'docs', linking to 'https://facebook.com'",
"message": "Link label",
},
"sidebar.otherSidebar.doc.Fifth doc translatable": {
"description": "The label for the doc item Fifth doc translatable in sidebar otherSidebar, linking to the doc doc5",
"description": "The label for the doc item 'Fifth doc translatable' in sidebar 'otherSidebar', linking to the doc doc5",
"message": "Fifth doc translatable",
},
"version.label": {
@ -38,27 +38,27 @@ exports[`getLoadedContentTranslationFiles returns translation files 1`] = `
{
"content": {
"sidebar.docs.category.Getting started": {
"description": "The label for category Getting started in sidebar docs",
"description": "The label for category 'Getting started' in sidebar 'docs'",
"message": "Getting started",
},
"sidebar.docs.category.Getting started.link.generated-index.description": {
"description": "The generated-index page description for category Getting started in sidebar docs",
"description": "The generated-index page description for category 'Getting started' in sidebar 'docs'",
"message": "Getting started index description",
},
"sidebar.docs.category.Getting started.link.generated-index.title": {
"description": "The generated-index page title for category Getting started in sidebar docs",
"description": "The generated-index page title for category 'Getting started' in sidebar 'docs'",
"message": "Getting started index title",
},
"sidebar.docs.doc.Second doc translatable": {
"description": "The label for the doc item Second doc translatable in sidebar docs, linking to the doc doc2",
"description": "The label for the doc item 'Second doc translatable' in sidebar 'docs', linking to the doc doc2",
"message": "Second doc translatable",
},
"sidebar.docs.link.Link label": {
"description": "The label for link Link label in sidebar docs, linking to https://facebook.com",
"description": "The label for link 'Link label' in sidebar 'docs', linking to 'https://facebook.com'",
"message": "Link label",
},
"sidebar.otherSidebar.doc.Fifth doc translatable": {
"description": "The label for the doc item Fifth doc translatable in sidebar otherSidebar, linking to the doc doc5",
"description": "The label for the doc item 'Fifth doc translatable' in sidebar 'otherSidebar', linking to the doc doc5",
"message": "Fifth doc translatable",
},
"version.label": {
@ -71,27 +71,27 @@ exports[`getLoadedContentTranslationFiles returns translation files 1`] = `
{
"content": {
"sidebar.docs.category.Getting started": {
"description": "The label for category Getting started in sidebar docs",
"description": "The label for category 'Getting started' in sidebar 'docs'",
"message": "Getting started",
},
"sidebar.docs.category.Getting started.link.generated-index.description": {
"description": "The generated-index page description for category Getting started in sidebar docs",
"description": "The generated-index page description for category 'Getting started' in sidebar 'docs'",
"message": "Getting started index description",
},
"sidebar.docs.category.Getting started.link.generated-index.title": {
"description": "The generated-index page title for category Getting started in sidebar docs",
"description": "The generated-index page title for category 'Getting started' in sidebar 'docs'",
"message": "Getting started index title",
},
"sidebar.docs.doc.Second doc translatable": {
"description": "The label for the doc item Second doc translatable in sidebar docs, linking to the doc doc2",
"description": "The label for the doc item 'Second doc translatable' in sidebar 'docs', linking to the doc doc2",
"message": "Second doc translatable",
},
"sidebar.docs.link.Link label": {
"description": "The label for link Link label in sidebar docs, linking to https://facebook.com",
"description": "The label for link 'Link label' in sidebar 'docs', linking to 'https://facebook.com'",
"message": "Link label",
},
"sidebar.otherSidebar.doc.Fifth doc translatable": {
"description": "The label for the doc item Fifth doc translatable in sidebar otherSidebar, linking to the doc doc5",
"description": "The label for the doc item 'Fifth doc translatable' in sidebar 'otherSidebar', linking to the doc doc5",
"message": "Fifth doc translatable",
},
"version.label": {

View File

@ -13,6 +13,7 @@ import {
posixPath,
DEFAULT_PLUGIN_ID,
LAST_UPDATE_FALLBACK,
getLocaleConfig,
} from '@docusaurus/utils';
import {getTagsFile} from '@docusaurus/utils-validation';
import {createSidebarsUtils} from '../sidebars/utils';
@ -25,7 +26,7 @@ import {
type DocEnv,
} from '../docs';
import {loadSidebars} from '../sidebars';
import {readVersionsMetadata} from '../versions';
import {readVersionsMetadata} from '../versions/version';
import {DEFAULT_OPTIONS} from '../options';
import type {Sidebars} from '../sidebars/types';
import type {DocFile} from '../types';
@ -842,7 +843,11 @@ describe('simple site', () => {
describe('versioned site', () => {
async function loadSite(
loadSiteOptions: {options: Partial<PluginOptions>; locale?: string} = {
loadSiteOptions: {
options?: Partial<PluginOptions>;
locale?: string;
translate?: boolean;
} = {
options: {},
},
) {
@ -851,6 +856,10 @@ describe('versioned site', () => {
siteDir,
locale: loadSiteOptions.locale,
});
// hacky but gets the job done
getLocaleConfig(context.i18n).translate = loadSiteOptions.translate ?? true;
const options = {
id: DEFAULT_PLUGIN_ID,
...DEFAULT_OPTIONS,
@ -1055,6 +1064,43 @@ describe('versioned site', () => {
});
});
it('versioned docs - translate: false', async () => {
const {version100TestUtils} = await loadSite({
translate: false,
});
// This doc is translated, but we still read the original
await version100TestUtils.testMeta(path.join('hello.md'), {
id: 'hello',
sourceDirName: '.',
permalink: '/docs/1.0.0/',
slug: '/',
title: 'hello',
description: 'Hello 1.0.0 !',
frontMatter: {
slug: '/',
tags: ['inlineTag-v1.0.0', 'globalTag-v1.0.0'],
},
version: '1.0.0',
source: '@site/versioned_docs/version-1.0.0/hello.md',
tags: [
{
description: undefined,
inline: true,
label: 'inlineTag-v1.0.0',
permalink: '/docs/1.0.0/tags/inline-tag-v-1-0-0',
},
{
description: 'globalTag-v1.0.0 description',
inline: false,
label: 'globalTag-v1.0.0 label',
permalink: '/docs/1.0.0/tags/globalTag-v1.0.0 permalink',
},
],
unlisted: false,
});
});
it('next doc slugs', async () => {
const {currentVersionTestUtils} = await loadSite();

View File

@ -186,10 +186,24 @@ describe('validateDocFrontMatter slug', () => {
});
});
describe('validateDocFrontMatter sidebar_key', () => {
testField({
prefix: 'sidebar_key',
validFrontMatters: [
{sidebar_key: undefined},
{sidebar_key: 'Awesome docs'},
],
invalidFrontMatters: [[{sidebar_key: ''}, 'is not allowed to be empty']],
});
});
describe('validateDocFrontMatter sidebar_label', () => {
testField({
prefix: 'sidebar_label',
validFrontMatters: [{sidebar_label: 'Awesome docs'}],
validFrontMatters: [
{sidebar_label: undefined},
{sidebar_label: 'Awesome docs'},
],
invalidFrontMatters: [[{sidebar_label: ''}, 'is not allowed to be empty']],
});
});

View File

@ -18,7 +18,7 @@ import {
createConfigureWebpackUtils,
} from '@docusaurus/core/src/webpack/configure';
import {sortRoutes} from '@docusaurus/core/src/server/plugins/routeConfig';
import {posixPath} from '@docusaurus/utils';
import {getLocaleConfig, posixPath} from '@docusaurus/utils';
import {normalizePluginOptions} from '@docusaurus/utils-validation';
import {fromPartial} from '@total-typescript/shoehorn';
@ -219,9 +219,13 @@ describe('empty/no docs website', () => {
});
describe('simple website', () => {
async function loadSite() {
async function loadSite({translate}: {translate?: boolean} = {}) {
const siteDir = path.join(__dirname, '__fixtures__', 'simple-site');
const context = await loadContext({siteDir});
// hacky but gets the job done
getLocaleConfig(context.i18n).translate = translate ?? true;
const sidebarPath = path.join(siteDir, 'sidebars.json');
const options = validateOptions({
validate: normalizePluginOptions as Validate<Options, PluginOptions>,
@ -233,7 +237,20 @@ describe('simple website', () => {
const plugin = await pluginContentDocs(context, options);
const pluginContentDir = path.join(context.generatedFilesDir, plugin.name);
return {siteDir, context, sidebarPath, plugin, options, pluginContentDir};
return {
siteDir,
context,
sidebarPath,
plugin,
options,
pluginContentDir,
getPathsToWatch: () => {
const pathToWatch = plugin.getPathsToWatch!();
return pathToWatch.map((filepath) =>
posixPath(path.relative(siteDir, filepath)),
);
},
};
}
it('extendCli - docsVersion', async () => {
@ -242,8 +259,6 @@ describe('simple website', () => {
.spyOn(cliDocs, 'cliDocsVersionCommand')
.mockImplementation(async () => {});
const cli = new commander.Command();
// @ts-expect-error: in actual usage, we pass the static commander instead
// of the new command
plugin.extendCli!(cli);
cli.parse(['node', 'test', 'docs:version', '1.0.0']);
expect(mock).toHaveBeenCalledTimes(1);
@ -251,25 +266,48 @@ describe('simple website', () => {
mock.mockRestore();
});
it('getPathToWatch', async () => {
const {siteDir, plugin} = await loadSite();
describe('getPathToWatch', () => {
it('translate: false', async () => {
const {getPathsToWatch} = await loadSite({translate: false});
expect(getPathsToWatch()).toMatchInlineSnapshot(`
[
"sidebars.json",
"docs/**/*.{md,mdx}",
"docs/tags.yml",
"docs/**/_category_.{json,yml,yaml}",
]
`);
});
const pathToWatch = plugin.getPathsToWatch!();
const matchPattern = pathToWatch.map((filepath) =>
posixPath(path.relative(siteDir, filepath)),
);
expect(matchPattern).toMatchSnapshot();
expect(isMatch('docs/hello.md', matchPattern)).toBe(true);
expect(isMatch('docs/hello.mdx', matchPattern)).toBe(true);
expect(isMatch('docs/foo/bar.md', matchPattern)).toBe(true);
expect(isMatch('docs/hello.js', matchPattern)).toBe(false);
expect(isMatch('docs/super.mdl', matchPattern)).toBe(false);
expect(isMatch('docs/mdx', matchPattern)).toBe(false);
expect(isMatch('docs/headingAsTitle.md', matchPattern)).toBe(true);
expect(isMatch('sidebars.json', matchPattern)).toBe(true);
expect(isMatch('versioned_docs/hello.md', matchPattern)).toBe(false);
expect(isMatch('hello.md', matchPattern)).toBe(false);
expect(isMatch('super/docs/hello.md', matchPattern)).toBe(false);
it('translate: true', async () => {
const {getPathsToWatch} = await loadSite({translate: true});
expect(getPathsToWatch()).toMatchInlineSnapshot(`
[
"sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/current/**/*.{md,mdx}",
"docs/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs/current/tags.yml",
"docs/tags.yml",
"docs/**/_category_.{json,yml,yaml}",
]
`);
});
it('returns patterns matching docs', async () => {
const {getPathsToWatch} = await loadSite();
const matchPattern = getPathsToWatch();
expect(isMatch('docs/hello.md', matchPattern)).toBe(true);
expect(isMatch('docs/hello.mdx', matchPattern)).toBe(true);
expect(isMatch('docs/foo/bar.md', matchPattern)).toBe(true);
expect(isMatch('docs/hello.js', matchPattern)).toBe(false);
expect(isMatch('docs/super.mdl', matchPattern)).toBe(false);
expect(isMatch('docs/mdx', matchPattern)).toBe(false);
expect(isMatch('docs/headingAsTitle.md', matchPattern)).toBe(true);
expect(isMatch('sidebars.json', matchPattern)).toBe(true);
expect(isMatch('versioned_docs/hello.md', matchPattern)).toBe(false);
expect(isMatch('hello.md', matchPattern)).toBe(false);
expect(isMatch('super/docs/hello.md', matchPattern)).toBe(false);
});
});
it('configureWebpack', async () => {
@ -329,9 +367,13 @@ describe('simple website', () => {
});
describe('versioned website', () => {
async function loadSite() {
async function loadSite({translate}: {translate?: boolean} = {}) {
const siteDir = path.join(__dirname, '__fixtures__', 'versioned-site');
const context = await loadContext({siteDir});
// Hacky, but it lets each test control the locale's translate option directly
getLocaleConfig(context.i18n).translate = translate ?? true;
const sidebarPath = path.join(siteDir, 'sidebars.json');
const routeBasePath = 'docs';
const options = validateOptions({
@ -356,6 +398,13 @@ describe('versioned website', () => {
options,
plugin,
pluginContentDir,
getPathsToWatch: () => {
const pathToWatch = plugin.getPathsToWatch!();
return pathToWatch.map((filepath) =>
posixPath(path.relative(siteDir, filepath)),
);
},
};
}
@ -365,8 +414,6 @@ describe('versioned website', () => {
.spyOn(cliDocs, 'cliDocsVersionCommand')
.mockImplementation(async () => {});
const cli = new commander.Command();
// @ts-expect-error: in actual usage, we pass the static commander instead
// of the new command
plugin.extendCli!(cli);
cli.parse(['node', 'test', 'docs:version', '2.0.0']);
expect(mock).toHaveBeenCalledTimes(1);
@ -374,48 +421,101 @@ describe('versioned website', () => {
mock.mockRestore();
});
it('getPathToWatch', async () => {
const {siteDir, plugin} = await loadSite();
const pathToWatch = plugin.getPathsToWatch!();
const matchPattern = pathToWatch.map((filepath) =>
posixPath(path.relative(siteDir, filepath)),
);
expect(matchPattern).not.toEqual([]);
expect(matchPattern).toMatchSnapshot();
expect(isMatch('docs/hello.md', matchPattern)).toBe(true);
expect(isMatch('docs/hello.mdx', matchPattern)).toBe(true);
expect(isMatch('docs/foo/bar.md', matchPattern)).toBe(true);
expect(isMatch('sidebars.json', matchPattern)).toBe(true);
expect(isMatch('versioned_docs/version-1.0.0/hello.md', matchPattern)).toBe(
true,
);
expect(
isMatch('versioned_docs/version-1.0.0/foo/bar.md', matchPattern),
).toBe(true);
expect(
isMatch('versioned_sidebars/version-1.0.0-sidebars.json', matchPattern),
).toBe(true);
describe('getPathToWatch', () => {
it('translate: false', async () => {
const {getPathsToWatch} = await loadSite({translate: false});
expect(getPathsToWatch()).toMatchInlineSnapshot(`
[
"sidebars.json",
"docs/**/*.{md,mdx}",
"docs/tags.yml",
"docs/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-1.0.1-sidebars.json",
"versioned_docs/version-1.0.1/**/*.{md,mdx}",
"versioned_docs/version-1.0.1/tags.yml",
"versioned_docs/version-1.0.1/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-1.0.0-sidebars.json",
"versioned_docs/version-1.0.0/**/*.{md,mdx}",
"versioned_docs/version-1.0.0/tags.yml",
"versioned_docs/version-1.0.0/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-withSlugs-sidebars.json",
"versioned_docs/version-withSlugs/**/*.{md,mdx}",
"versioned_docs/version-withSlugs/tags.yml",
"versioned_docs/version-withSlugs/**/_category_.{json,yml,yaml}",
]
`);
});
// Non existing version
expect(
isMatch('versioned_docs/version-2.0.0/foo/bar.md', matchPattern),
).toBe(false);
expect(isMatch('versioned_docs/version-2.0.0/hello.md', matchPattern)).toBe(
false,
);
expect(
isMatch('versioned_sidebars/version-2.0.0-sidebars.json', matchPattern),
).toBe(false);
it('translate: true', async () => {
const {getPathsToWatch} = await loadSite({translate: true});
expect(getPathsToWatch()).toMatchInlineSnapshot(`
[
"sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/current/**/*.{md,mdx}",
"docs/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs/current/tags.yml",
"docs/tags.yml",
"docs/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-1.0.1-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/version-1.0.1/**/*.{md,mdx}",
"versioned_docs/version-1.0.1/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs/version-1.0.1/tags.yml",
"versioned_docs/version-1.0.1/tags.yml",
"versioned_docs/version-1.0.1/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-1.0.0-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/version-1.0.0/**/*.{md,mdx}",
"versioned_docs/version-1.0.0/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs/version-1.0.0/tags.yml",
"versioned_docs/version-1.0.0/tags.yml",
"versioned_docs/version-1.0.0/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-withSlugs-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/version-withSlugs/**/*.{md,mdx}",
"versioned_docs/version-withSlugs/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs/version-withSlugs/tags.yml",
"versioned_docs/version-withSlugs/tags.yml",
"versioned_docs/version-withSlugs/**/_category_.{json,yml,yaml}",
]
`);
});
expect(isMatch('docs/hello.js', matchPattern)).toBe(false);
expect(isMatch('docs/super.mdl', matchPattern)).toBe(false);
expect(isMatch('docs/mdx', matchPattern)).toBe(false);
expect(isMatch('hello.md', matchPattern)).toBe(false);
expect(isMatch('super/docs/hello.md', matchPattern)).toBe(false);
it('returns patterns matching docs', async () => {
const {getPathsToWatch} = await loadSite();
const matchPattern = getPathsToWatch();
expect(isMatch('docs/hello.md', matchPattern)).toBe(true);
expect(isMatch('docs/hello.mdx', matchPattern)).toBe(true);
expect(isMatch('docs/foo/bar.md', matchPattern)).toBe(true);
expect(isMatch('sidebars.json', matchPattern)).toBe(true);
expect(
isMatch('versioned_docs/version-1.0.0/hello.md', matchPattern),
).toBe(true);
expect(
isMatch('versioned_docs/version-1.0.0/foo/bar.md', matchPattern),
).toBe(true);
expect(
isMatch('versioned_sidebars/version-1.0.0-sidebars.json', matchPattern),
).toBe(true);
// Non existing version
expect(
isMatch('versioned_docs/version-2.0.0/foo/bar.md', matchPattern),
).toBe(false);
expect(
isMatch('versioned_docs/version-2.0.0/hello.md', matchPattern),
).toBe(false);
expect(
isMatch('versioned_sidebars/version-2.0.0-sidebars.json', matchPattern),
).toBe(false);
expect(isMatch('docs/hello.js', matchPattern)).toBe(false);
expect(isMatch('docs/super.mdl', matchPattern)).toBe(false);
expect(isMatch('docs/mdx', matchPattern)).toBe(false);
expect(isMatch('hello.md', matchPattern)).toBe(false);
expect(isMatch('super/docs/hello.md', matchPattern)).toBe(false);
});
});
it('content', async () => {
const {plugin, pluginContentDir} = await loadSite();
const {plugin, pluginContentDir} = await loadSite({translate: true});
const content = await plugin.loadContent!();
expect(content.loadedVersions).toHaveLength(4);
const [currentVersion, version101, version100, versionWithSlugs] =
@ -453,9 +553,13 @@ describe('versioned website', () => {
});
describe('versioned website (community)', () => {
async function loadSite() {
async function loadSite({translate}: {translate?: boolean} = {}) {
const siteDir = path.join(__dirname, '__fixtures__', 'versioned-site');
const context = await loadContext({siteDir});
// Hacky, but it lets each test control the locale's translate option directly
getLocaleConfig(context.i18n).translate = translate ?? true;
const sidebarPath = path.join(siteDir, 'community_sidebars.json');
const routeBasePath = 'community';
const pluginId = 'community';
@ -479,6 +583,13 @@ describe('versioned website (community)', () => {
options,
plugin,
pluginContentDir,
getPathsToWatch: () => {
const pathToWatch = plugin.getPathsToWatch!();
return pathToWatch.map((filepath) =>
posixPath(path.relative(siteDir, filepath)),
);
},
};
}
@ -488,8 +599,6 @@ describe('versioned website (community)', () => {
.spyOn(cliDocs, 'cliDocsVersionCommand')
.mockImplementation(async () => {});
const cli = new commander.Command();
// @ts-expect-error: in actual usage, we pass the static commander instead
// of the new command
plugin.extendCli!(cli);
cli.parse(['node', 'test', `docs:version:${pluginId}`, '2.0.0']);
expect(mock).toHaveBeenCalledTimes(1);
@ -497,34 +606,67 @@ describe('versioned website (community)', () => {
mock.mockRestore();
});
it('getPathToWatch', async () => {
const {siteDir, plugin} = await loadSite();
const pathToWatch = plugin.getPathsToWatch!();
const matchPattern = pathToWatch.map((filepath) =>
posixPath(path.relative(siteDir, filepath)),
);
expect(matchPattern).not.toEqual([]);
expect(matchPattern).toMatchSnapshot();
expect(isMatch('community/team.md', matchPattern)).toBe(true);
expect(
isMatch('community_versioned_docs/version-1.0.0/team.md', matchPattern),
).toBe(true);
describe('getPathToWatch', () => {
it('translate: false', async () => {
const {getPathsToWatch} = await loadSite({translate: false});
expect(getPathsToWatch()).toMatchInlineSnapshot(`
[
"community_sidebars.json",
"community/**/*.{md,mdx}",
"community/tags.yml",
"community/**/_category_.{json,yml,yaml}",
"community_versioned_sidebars/version-1.0.0-sidebars.json",
"community_versioned_docs/version-1.0.0/**/*.{md,mdx}",
"community_versioned_docs/version-1.0.0/tags.yml",
"community_versioned_docs/version-1.0.0/**/_category_.{json,yml,yaml}",
]
`);
});
// Non existing version
expect(
isMatch('community_versioned_docs/version-2.0.0/team.md', matchPattern),
).toBe(false);
expect(
isMatch(
'community_versioned_sidebars/version-2.0.0-sidebars.json',
matchPattern,
),
).toBe(false);
it('translate: true', async () => {
const {getPathsToWatch} = await loadSite({translate: true});
expect(getPathsToWatch()).toMatchInlineSnapshot(`
[
"community_sidebars.json",
"i18n/en/docusaurus-plugin-content-docs-community/current/**/*.{md,mdx}",
"community/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs-community/current/tags.yml",
"community/tags.yml",
"community/**/_category_.{json,yml,yaml}",
"community_versioned_sidebars/version-1.0.0-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs-community/version-1.0.0/**/*.{md,mdx}",
"community_versioned_docs/version-1.0.0/**/*.{md,mdx}",
"i18n/en/docusaurus-plugin-content-docs-community/version-1.0.0/tags.yml",
"community_versioned_docs/version-1.0.0/tags.yml",
"community_versioned_docs/version-1.0.0/**/_category_.{json,yml,yaml}",
]
`);
});
expect(isMatch('community/team.js', matchPattern)).toBe(false);
expect(
isMatch('community_versioned_docs/version-1.0.0/team.js', matchPattern),
).toBe(false);
it('returns patterns matching docs', async () => {
const {getPathsToWatch} = await loadSite();
const matchPattern = getPathsToWatch();
expect(isMatch('community/team.md', matchPattern)).toBe(true);
expect(
isMatch('community_versioned_docs/version-1.0.0/team.md', matchPattern),
).toBe(true);
// Non existing version
expect(
isMatch('community_versioned_docs/version-2.0.0/team.md', matchPattern),
).toBe(false);
expect(
isMatch(
'community_versioned_sidebars/version-2.0.0-sidebars.json',
matchPattern,
),
).toBe(false);
expect(isMatch('community/team.js', matchPattern)).toBe(false);
expect(
isMatch('community_versioned_docs/version-1.0.0/team.js', matchPattern),
).toBe(false);
});
});
it('content', async () => {
@ -582,14 +724,16 @@ describe('site with doc label', () => {
);
});
it('sidebar_label in doc has higher precedence over label in sidebar.json', async () => {
it('frontMatter.sidebar_* data in doc takes precedence over sidebar.json data', async () => {
const {content} = await loadSite();
const loadedVersion = content.loadedVersions[0]!;
const sidebarProps = toSidebarsProp(loadedVersion);
expect((sidebarProps.docs![1] as PropSidebarItemLink).label).toBe(
'Hello 2 From Doc',
);
const item = sidebarProps.docs![1] as PropSidebarItemLink;
expect(item.label).toBe('Hello 2 From Doc');
expect(item.className).toBe('front-matter-class-name');
expect(item.customProps).toStrictEqual({custom: 'from front matter'});
});
});

View File

@ -5,7 +5,12 @@
* LICENSE file in the root directory of this source tree.
*/
import {toSidebarDocItemLinkProp, toTagDocListProp} from '../props';
import {fromPartial} from '@total-typescript/shoehorn';
import {
toSidebarDocItemLinkProp,
toSidebarsProp,
toTagDocListProp,
} from '../props';
describe('toTagDocListProp', () => {
type Params = Parameters<typeof toTagDocListProp>[0];
@ -132,3 +137,123 @@ describe('toSidebarDocItemLinkProp', () => {
).toBe(false);
});
});
describe('toSidebarsProp', () => {
type Params = Parameters<typeof toSidebarsProp>[0];
it('works', () => {
const params: Params = {
docs: [
fromPartial({
id: 'doc-id-1',
permalink: '/doc-1',
title: 'Doc 1 title',
frontMatter: {},
}),
],
sidebars: {
mySidebar: [
{
type: 'link',
label: 'Example link',
key: 'link-example-key',
href: 'https://example.com',
},
{
type: 'ref',
label: 'Doc 1 ref',
key: 'ref-with-doc-id-1',
id: 'doc-id-1',
},
{
type: 'ref',
id: 'doc-id-1',
// no label/key on purpose
},
{
type: 'category',
label: 'My category',
key: 'my-category-key',
collapsible: false,
collapsed: true,
items: [
{
type: 'doc',
label: 'Doc 1',
key: 'doc-id-1',
id: 'doc-id-1',
},
{
type: 'doc',
id: 'doc-id-1',
// no label/key on purpose
},
],
},
],
},
};
const result = toSidebarsProp(params);
expect(result).toMatchInlineSnapshot(`
{
"mySidebar": [
{
"href": "https://example.com",
"key": "link-example-key",
"label": "Example link",
"type": "link",
},
{
"className": undefined,
"customProps": undefined,
"docId": "doc-id-1",
"href": "/doc-1",
"key": "ref-with-doc-id-1",
"label": "Doc 1 ref",
"type": "link",
"unlisted": undefined,
},
{
"className": undefined,
"customProps": undefined,
"docId": "doc-id-1",
"href": "/doc-1",
"label": "Doc 1 title",
"type": "link",
"unlisted": undefined,
},
{
"collapsed": true,
"collapsible": false,
"items": [
{
"className": undefined,
"customProps": undefined,
"docId": "doc-id-1",
"href": "/doc-1",
"key": "doc-id-1",
"label": "Doc 1",
"type": "link",
"unlisted": undefined,
},
{
"className": undefined,
"customProps": undefined,
"docId": "doc-id-1",
"href": "/doc-1",
"label": "Doc 1 title",
"type": "link",
"unlisted": undefined,
},
],
"key": "my-category-key",
"label": "My category",
"type": "category",
},
],
}
`);
});
});

View File

@ -16,6 +16,7 @@ import type {
LoadedContent,
LoadedVersion,
} from '@docusaurus/plugin-content-docs';
import type {Sidebar} from '../sidebars/types';
function createSampleDoc(doc: Pick<DocMetadata, 'id'>): DocMetadata {
return {
@ -41,7 +42,7 @@ function createSampleDoc(doc: Pick<DocMetadata, 'id'>): DocMetadata {
}
function createSampleVersion(
version: Pick<LoadedVersion, 'versionName'>,
version: Pick<LoadedVersion, 'versionName'> & Partial<LoadedVersion>,
): LoadedVersion {
return {
label: `${version.versionName} label`,
@ -152,6 +153,153 @@ describe('getLoadedContentTranslationFiles', () => {
it('returns translation files', () => {
expect(getSampleTranslationFiles()).toMatchSnapshot();
});
describe('translation key conflicts', () => {
function runTest({withUniqueKeys}: {withUniqueKeys: boolean}) {
const sidebarWithConflicts: Sidebar = [
{
type: 'doc',
id: 'doc4',
label: 'COMMON LABEL',
translatable: true,
...(withUniqueKeys && {key: 'key-doc4'}),
},
{
type: 'doc',
id: 'doc5',
label: 'COMMON LABEL',
translatable: true,
...(withUniqueKeys && {key: 'key-doc5'}),
},
{
type: 'ref',
id: 'doc4',
label: 'COMMON LABEL',
translatable: true,
...(withUniqueKeys && {key: 'key-ref4'}),
},
{
type: 'ref',
id: 'doc5',
label: 'COMMON LABEL',
translatable: true,
...(withUniqueKeys && {key: 'key-ref5'}),
},
{
type: 'category',
label: 'COMMON LABEL',
items: [],
collapsed: false,
collapsible: true,
...(withUniqueKeys && {key: 'key-cat1'}),
},
{
type: 'category',
label: 'COMMON LABEL',
items: [],
collapsed: false,
collapsible: true,
...(withUniqueKeys && {key: 'key-cat2'}),
},
{
type: 'link',
href: 'https://example.com',
label: 'COMMON LABEL',
...(withUniqueKeys && {key: 'key-link1'}),
},
{
type: 'link',
href: 'https://example.com',
label: 'COMMON LABEL',
...(withUniqueKeys && {key: 'key-link2'}),
},
];
const version = createSampleVersion({
versionName: CURRENT_VERSION_NAME,
sidebars: {
sidebarWithConflicts,
},
});
return getLoadedContentTranslationFiles({
loadedVersions: [version],
});
}
it('works on sidebar with translation key conflicts resolved by unique sidebar item keys', () => {
expect(runTest({withUniqueKeys: true})).toMatchInlineSnapshot(`
[
{
"content": {
"sidebar.sidebarWithConflicts.category.key-cat1": {
"description": "The label for category 'COMMON LABEL' in sidebar 'sidebarWithConflicts'",
"message": "COMMON LABEL",
},
"sidebar.sidebarWithConflicts.category.key-cat2": {
"description": "The label for category 'COMMON LABEL' in sidebar 'sidebarWithConflicts'",
"message": "COMMON LABEL",
},
"sidebar.sidebarWithConflicts.doc.key-doc4": {
"description": "The label for the doc item 'COMMON LABEL' in sidebar 'sidebarWithConflicts', linking to the doc doc4",
"message": "COMMON LABEL",
},
"sidebar.sidebarWithConflicts.doc.key-doc5": {
"description": "The label for the doc item 'COMMON LABEL' in sidebar 'sidebarWithConflicts', linking to the doc doc5",
"message": "COMMON LABEL",
},
"sidebar.sidebarWithConflicts.doc.key-ref4": {
"description": "The label for the doc item 'COMMON LABEL' in sidebar 'sidebarWithConflicts', linking to the doc doc4",
"message": "COMMON LABEL",
},
"sidebar.sidebarWithConflicts.doc.key-ref5": {
"description": "The label for the doc item 'COMMON LABEL' in sidebar 'sidebarWithConflicts', linking to the doc doc5",
"message": "COMMON LABEL",
},
"sidebar.sidebarWithConflicts.link.key-link1": {
"description": "The label for link 'COMMON LABEL' in sidebar 'sidebarWithConflicts', linking to 'https://example.com'",
"message": "COMMON LABEL",
},
"sidebar.sidebarWithConflicts.link.key-link2": {
"description": "The label for link 'COMMON LABEL' in sidebar 'sidebarWithConflicts', linking to 'https://example.com'",
"message": "COMMON LABEL",
},
"version.label": {
"description": "The label for version current",
"message": "current label",
},
},
"path": "current",
},
]
`);
});
it('throws on sidebar translation key conflicts', () => {
expect(() => runTest({withUniqueKeys: false}))
.toThrowErrorMatchingInlineSnapshot(`
"Multiple docs sidebar items produce the same translation key.
- \`sidebar.sidebarWithConflicts.category.COMMON LABEL\`: 2 duplicates found:
- COMMON LABEL (The label for category 'COMMON LABEL' in sidebar 'sidebarWithConflicts')
- COMMON LABEL (The label for category 'COMMON LABEL' in sidebar 'sidebarWithConflicts')
- \`sidebar.sidebarWithConflicts.link.COMMON LABEL\`: 2 duplicates found:
- COMMON LABEL (The label for link 'COMMON LABEL' in sidebar 'sidebarWithConflicts', linking to 'https://example.com')
- COMMON LABEL (The label for link 'COMMON LABEL' in sidebar 'sidebarWithConflicts', linking to 'https://example.com')
- \`sidebar.sidebarWithConflicts.doc.COMMON LABEL\`: 4 duplicates found:
- COMMON LABEL (The label for the doc item 'COMMON LABEL' in sidebar 'sidebarWithConflicts', linking to the doc doc4)
- COMMON LABEL (The label for the doc item 'COMMON LABEL' in sidebar 'sidebarWithConflicts', linking to the doc doc5)
- COMMON LABEL (The label for the doc item 'COMMON LABEL' in sidebar 'sidebarWithConflicts', linking to the doc doc4)
- COMMON LABEL (The label for the doc item 'COMMON LABEL' in sidebar 'sidebarWithConflicts', linking to the doc doc5)
To avoid translation key conflicts, use the \`key\` attribute on the sidebar items above to uniquely identify them.
When using autogenerated sidebars, you can provide a unique translation key by adding:
- the \`key\` attribute to category item metadata (\`_category_.json\` / \`_category_.yml\`)
- the \`sidebar_key\` attribute to doc item metadata (front matter in \`Category/index.mdx\`)"
`);
});
});
});
describe('translateLoadedContent', () => {

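For illustration, a minimal sketch of how the `key` attribute could be used in a hand-written sidebars file to avoid the translation key conflicts exercised above. The sidebar name and doc ids are invented, and it assumes the sidebar item input types accept the new `key` field, as the tests suggest:

import type {SidebarsConfig} from '@docusaurus/plugin-content-docs';

const sidebars: SidebarsConfig = {
  docsSidebar: [
    // Both items share the label "Overview"; without `key`, they would both
    // produce the translation key `sidebar.docsSidebar.doc.Overview` and conflict.
    {type: 'doc', id: 'guides/overview', label: 'Overview', key: 'guides-overview'},
    {type: 'doc', id: 'api/overview', label: 'Overview', key: 'api-overview'},
  ],
};

export default sidebars;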
View File

@ -8,7 +8,7 @@
import fs from 'fs-extra';
import path from 'path';
import logger from '@docusaurus/logger';
import {DEFAULT_PLUGIN_ID} from '@docusaurus/utils';
import {DEFAULT_PLUGIN_ID, getLocaleConfig} from '@docusaurus/utils';
import {
getVersionsFilePath,
getVersionDocsDirPath,
@ -89,7 +89,7 @@ async function cliDocsVersionCommand(
const localizationDir = path.resolve(
siteDir,
i18n.path,
i18n.localeConfigs[locale]!.path,
getLocaleConfig(i18n, locale).path,
);
// Copy docs files.
const docsDir =

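The `getLocaleConfig` helper itself is not shown in this diff. Inferred purely from its call sites, `getLocaleConfig(i18n)` and `getLocaleConfig(i18n, locale)`, it presumably behaves roughly like the sketch below; the default-locale fallback and the error message are assumptions, not the actual implementation in `@docusaurus/utils`:

import type {I18n} from '@docusaurus/types';

function getLocaleConfig(
  i18n: I18n,
  locale: string = i18n.currentLocale,
): I18n['localeConfigs'][string] {
  const localeConfig = i18n.localeConfigs[locale];
  if (!localeConfig) {
    // Assumed: fail fast instead of relying on the previous
    // non-null assertion `i18n.localeConfigs[locale]!`.
    throw new Error(`No locale config found for locale "${locale}"`);
  }
  return localeConfig;
}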
View File

@ -196,7 +196,9 @@ async function doProcessDocMetadata({
locale: context.i18n.currentLocale,
});
} else if (typeof options.editUrl === 'string') {
const isLocalized = contentPath === versionMetadata.contentPathLocalized;
const isLocalized =
typeof versionMetadata.contentPathLocalized !== 'undefined' &&
contentPath === versionMetadata.contentPathLocalized;
const baseVersionEditUrl =
isLocalized && options.editLocalizedFiles
? versionMetadata.editUrlLocalized

View File

@ -30,6 +30,7 @@ export const DocFrontMatterSchema = Joi.object<DocFrontMatter>({
// See https://github.com/facebook/docusaurus/issues/4591#issuecomment-822372398
description: Joi.string().allow(''),
slug: Joi.string(),
sidebar_key: Joi.string(),
sidebar_label: Joi.string(),
sidebar_position: Joi.number(),
sidebar_class_name: Joi.string(),

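As a quick illustration: `sidebar_key` is validated like `sidebar_label`, an optional but non-empty string. A small sketch using the Joi schema directly (the relative import path is assumed for the example):

import {DocFrontMatterSchema} from './frontMatter';

// Valid: a non-empty string, or simply omitting the field.
const valid = DocFrontMatterSchema.validate({
  sidebar_key: 'key-doc4',
  sidebar_label: 'Doc 4',
});
console.log(valid.error); // undefined

// Invalid: an empty string fails with "is not allowed to be empty",
// matching the front matter tests earlier in this diff.
const invalid = DocFrontMatterSchema.validate({sidebar_key: ''});
console.log(invalid.error?.message);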
View File

@ -7,8 +7,6 @@
import path from 'path';
import fs from 'fs-extra';
import _ from 'lodash';
import logger from '@docusaurus/logger';
import {
normalizeUrl,
docuHash,
@ -17,30 +15,19 @@ import {
posixPath,
addTrailingPathSeparator,
createAbsoluteFilePathMatcher,
createSlugger,
resolveMarkdownLinkPathname,
DEFAULT_PLUGIN_ID,
type TagsFile,
} from '@docusaurus/utils';
import {
getTagsFile,
getTagsFilePathsToWatch,
} from '@docusaurus/utils-validation';
import {getTagsFilePathsToWatch} from '@docusaurus/utils-validation';
import {createMDXLoaderRule} from '@docusaurus/mdx-loader';
import {loadSidebars, resolveSidebarPathOption} from './sidebars';
import {resolveSidebarPathOption} from './sidebars';
import {CategoryMetadataFilenamePattern} from './sidebars/generator';
import {
readVersionDocs,
processDocMetadata,
addDocNavigation,
type DocEnv,
createDocsByIdIndex,
} from './docs';
import {type DocEnv} from './docs';
import {
getVersionFromSourceFilePath,
readVersionsMetadata,
toFullVersion,
} from './versions';
} from './versions/version';
import cliDocs from './cli';
import {VERSIONS_JSON_FILE} from './constants';
import {toGlobalDataVersion} from './globalData';
@ -49,19 +36,17 @@ import {
getLoadedContentTranslationFiles,
} from './translations';
import {createAllRoutes} from './routes';
import {createSidebarsUtils} from './sidebars/utils';
import {createContentHelpers} from './contentHelpers';
import {loadVersion} from './versions/loadVersion';
import type {
PluginOptions,
DocMetadataBase,
VersionMetadata,
DocFrontMatter,
LoadedContent,
LoadedVersion,
} from '@docusaurus/plugin-content-docs';
import type {LoadContext, Plugin} from '@docusaurus/types';
import type {DocFile, FullVersion} from './types';
import type {FullVersion} from './types';
import type {RuleSetRule} from 'webpack';
// MDX loader is not 100% deterministic, leading to cache invalidation issue
@ -77,6 +62,12 @@ async function createMdxLoaderDependencyFile({
options: PluginOptions;
versionsMetadata: VersionMetadata[];
}): Promise<string | undefined> {
// Disabled for unit tests: the side effect produces infinite watch loops
// TODO find a better way :/
if (process.env.NODE_ENV === 'test') {
return undefined;
}
const filePath = path.join(dataDir, '__mdx-loader-dependency.json');
// the cache is invalidated whenever this file content changes
const fileContent = {
@ -172,18 +163,12 @@ export default async function pluginContentDocs(
sourceFilePath,
versionsMetadata,
);
const permalink = resolveMarkdownLinkPathname(linkPathname, {
return resolveMarkdownLinkPathname(linkPathname, {
sourceFilePath,
sourceToPermalink: contentHelpers.sourceToPermalink,
siteDir,
contentPaths: version,
});
if (permalink === null) {
logger.report(
siteConfig.onBrokenMarkdownLinks,
)`Docs markdown link couldn't be resolved: (url=${linkPathname}) in source file path=${sourceFilePath} for version number=${version.versionName}`;
}
return permalink;
},
},
});
@ -243,102 +228,17 @@ export default async function pluginContentDocs(
},
async loadContent() {
async function loadVersionDocsBase(
versionMetadata: VersionMetadata,
tagsFile: TagsFile | null,
): Promise<DocMetadataBase[]> {
const docFiles = await readVersionDocs(versionMetadata, options);
if (docFiles.length === 0) {
throw new Error(
`Docs version "${
versionMetadata.versionName
}" has no docs! At least one doc should exist at "${path.relative(
siteDir,
versionMetadata.contentPath,
)}".`,
);
}
function processVersionDoc(docFile: DocFile) {
return processDocMetadata({
docFile,
versionMetadata,
context,
options,
env,
tagsFile,
});
}
return Promise.all(docFiles.map(processVersionDoc));
}
async function doLoadVersion(
versionMetadata: VersionMetadata,
): Promise<LoadedVersion> {
const tagsFile = await getTagsFile({
contentPaths: versionMetadata,
tags: options.tags,
});
const docsBase: DocMetadataBase[] = await loadVersionDocsBase(
versionMetadata,
tagsFile,
);
// TODO we only ever need draftIds in further code, not full draft items
// To simplify and prevent mistakes, avoid exposing draft
// replace draft=>draftIds in content loaded
const [drafts, docs] = _.partition(docsBase, (doc) => doc.draft);
const sidebars = await loadSidebars(versionMetadata.sidebarFilePath, {
sidebarItemsGenerator: options.sidebarItemsGenerator,
numberPrefixParser: options.numberPrefixParser,
docs,
drafts,
version: versionMetadata,
sidebarOptions: {
sidebarCollapsed: options.sidebarCollapsed,
sidebarCollapsible: options.sidebarCollapsible,
},
categoryLabelSlugger: createSlugger(),
});
const sidebarsUtils = createSidebarsUtils(sidebars);
const docsById = createDocsByIdIndex(docs);
const allDocIds = Object.keys(docsById);
sidebarsUtils.checkLegacyVersionedSidebarNames({
sidebarFilePath: versionMetadata.sidebarFilePath as string,
versionMetadata,
});
sidebarsUtils.checkSidebarsDocIds({
allDocIds,
sidebarFilePath: versionMetadata.sidebarFilePath as string,
versionMetadata,
});
return {
...versionMetadata,
docs: addDocNavigation({
docs,
sidebarsUtils,
}),
drafts,
sidebars,
};
}
async function loadVersion(versionMetadata: VersionMetadata) {
try {
return await doLoadVersion(versionMetadata);
} catch (err) {
logger.error`Loading of version failed for version name=${versionMetadata.versionName}`;
throw err;
}
}
return {
loadedVersions: await Promise.all(versionsMetadata.map(loadVersion)),
loadedVersions: await Promise.all(
versionsMetadata.map((versionMetadata) =>
loadVersion({
context,
options,
env,
versionMetadata,
}),
),
),
};
},

View File

@ -339,7 +339,15 @@ declare module '@docusaurus/plugin-content-docs' {
* @see {@link DocMetadata.slug}
*/
slug?: string;
/** Customizes the sidebar label for this doc. Will default to its title. */
/**
* Customizes the sidebar key for this doc,
* to uniquely identify it in translations.
*/
sidebar_key?: string;
/**
* Customizes the sidebar label for this doc.
* Will default to its title.
*/
sidebar_label?: string;
/**
* Controls the position of a doc inside the generated sidebar slice when

Some files were not shown because too many files have changed in this diff