Compare commits

..

48 Commits
v3.9.2 ... main

Author SHA1 Message Date
Sébastien Lorber 5bc5c90dc7
chore(website): upgrade to DocSearch 4.4.0 + fix little website theming issues (#11626)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20.0) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (22) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (24) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (25.1) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 Windows (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Has been cancelled
E2E Tests / E2E — npm (push) Has been cancelled
E2E Tests / E2E — pnpm (push) Has been cancelled
2025-12-23 16:29:00 +01:00
dependabot[bot] ee9dfd5d0b
chore(deps): bump preactjs/compressed-size-action from 2.8.0 to 2.9.0 - pin all remaining GitHub actions (#11625)
Some checks are pending
Argos CI / take-screenshots (push) Waiting to run
Build Hash Router / Build Hash Router (push) Waiting to run
CodeQL / Analyze (javascript) (push) Waiting to run
Continuous Releases / Continuous Releases (push) Waiting to run
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Sébastien Lorber <slorber@users.noreply.github.com>
Co-authored-by: sebastien <lorber.sebastien@gmail.com>
2025-12-23 11:10:37 +01:00
Cesar Garcia 7f5d6122d2
fix(docs): breadcrumb APIs only return category/docs items, ignoring links (#11616)
Some checks failed
Argos CI / take-screenshots (push) Waiting to run
Build Hash Router / Build Hash Router (push) Waiting to run
CodeQL / Analyze (javascript) (push) Waiting to run
Continuous Releases / Continuous Releases (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20.0) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (22) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (24) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (25.1) (push) Waiting to run
E2E Tests / E2E — Yarn v1 Windows (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Waiting to run
E2E Tests / E2E — npm (push) Waiting to run
E2E Tests / E2E — pnpm (push) Waiting to run
Canary Release / Publish Canary (push) Has been cancelled
Co-authored-by: sebastien <lorber.sebastien@gmail.com>
2025-12-22 17:04:14 +01:00
Matthew Cheung 47a98a1d6e
feat(create-docusaurus): enable creation in current directory (#11611) 2025-12-22 16:54:03 +01:00
Nader Jaber 75a529bb8f
docs: Add expose-markdown-docusaurus-plugin resource (#11623)
Some checks are pending
Argos CI / take-screenshots (push) Waiting to run
Build Hash Router / Build Hash Router (push) Waiting to run
CodeQL / Analyze (javascript) (push) Waiting to run
Continuous Releases / Continuous Releases (push) Waiting to run
2025-12-22 10:24:50 +01:00
Salman Chishti acd96cb3f0
chore: Upgrade GitHub Actions for Node 24 compatibility (#11621) 2025-12-22 10:18:14 +01:00
Salman Chishti 0799e20b67
chore: Upgrade GitHub Actions to latest versions (#11622)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
2025-12-17 16:45:43 +01:00
dependabot[bot] d4a66aa2ed
chore(deps): bump actions/setup-node from 6.0.0 to 6.1.0 (#11608)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
Canary Release / Publish Canary (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-09 15:18:18 +01:00
dependabot[bot] 0f8cda2f65
chore(deps): bump actions/checkout from 6.0.0 to 6.0.1 (#11609)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-09 15:18:02 +01:00
TheCyperpunk 59bce2b21c
fix(a11y): add Space key support for navbar dropdowns (#11513)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
Canary Release / Publish Canary (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20.0) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (22) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (24) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (25.1) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 Windows (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Has been cancelled
E2E Tests / E2E — npm (push) Has been cancelled
E2E Tests / E2E — pnpm (push) Has been cancelled
2025-12-05 19:33:44 +01:00
Andrew Kazakov 21b7b7fd02
fix(eslint-plugin): specify exact type of `no-untranslated-text` rule options (#11587) 2025-12-05 19:26:58 +01:00
Sébastien Lorber 5e77169b35
fix(core): webpack aliases shouldn't be created for test files and typedefs (#11604) 2025-12-05 19:15:41 +01:00
Hugo Häggmark 616dec13b3
feat(theme-search-algolia): allow overriding transformSearchClient (#11581)
Co-authored-by: sebastien <lorber.sebastien@gmail.com>
2025-12-05 18:44:15 +01:00
Sébastien Lorber 455358880d
fix(core): Fix openBrowser AppleScript support for Arc (#11603)
Some checks are pending
Argos CI / take-screenshots (push) Waiting to run
Build Hash Router / Build Hash Router (push) Waiting to run
Canary Release / Publish Canary (push) Waiting to run
CodeQL / Analyze (javascript) (push) Waiting to run
Continuous Releases / Continuous Releases (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20.0) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (22) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (24) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (25.1) (push) Waiting to run
E2E Tests / E2E — Yarn v1 Windows (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Waiting to run
E2E Tests / E2E — npm (push) Waiting to run
E2E Tests / E2E — pnpm (push) Waiting to run
2025-12-05 16:41:58 +01:00
dependabot[bot] 6efe49abaf
chore(deps): bump node-forge from 1.3.1 to 1.3.3 in /examples/classic-typescript (#11601)
Some checks are pending
Argos CI / take-screenshots (push) Waiting to run
Build Hash Router / Build Hash Router (push) Waiting to run
Canary Release / Publish Canary (push) Waiting to run
CodeQL / Analyze (javascript) (push) Waiting to run
Continuous Releases / Continuous Releases (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20.0) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (22) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (24) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (25.1) (push) Waiting to run
E2E Tests / E2E — Yarn v1 Windows (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Waiting to run
E2E Tests / E2E — npm (push) Waiting to run
E2E Tests / E2E — pnpm (push) Waiting to run
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-04 15:16:59 +01:00
Justin Beckwith ebb0d0e3bb
chore(deps): remove unused @babel/runtime-corejs3 dependency (#11586)
Co-authored-by: sebastien <lorber.sebastien@gmail.com>
2025-12-04 15:09:29 +01:00
dependabot[bot] eccc778249
chore(deps): bump mdast-util-to-hast from 13.2.0 to 13.2.1 (#11589)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20.0) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (22) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (24) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (25.1) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 Windows (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Has been cancelled
E2E Tests / E2E — npm (push) Has been cancelled
E2E Tests / E2E — pnpm (push) Has been cancelled
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-02 13:09:12 +01:00
Sébastien Lorber c6c0f636a8
fix(core): in `isInternalUrl()`, URI protocol scheme detection should implement the spec more strictly (#11579)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
Canary Release / Publish Canary (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20.0) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (22) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (24) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (25.1) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 Windows (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Has been cancelled
E2E Tests / E2E — npm (push) Has been cancelled
E2E Tests / E2E — pnpm (push) Has been cancelled
2025-11-27 17:33:12 +01:00
Sébastien Lorber c32ed21431
fix(blog): Fix author paginated page url: `/blog/authors/<author>/page/2` (#11577) 2025-11-27 16:39:08 +01:00
Balthasar Hofer c6a86ff717
feat(core): support custom html elements in head tags (#11571)
Some checks are pending
Argos CI / take-screenshots (push) Waiting to run
Build Hash Router / Build Hash Router (push) Waiting to run
Canary Release / Publish Canary (push) Waiting to run
CodeQL / Analyze (javascript) (push) Waiting to run
Continuous Releases / Continuous Releases (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20.0) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (22) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (24) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (25.1) (push) Waiting to run
E2E Tests / E2E — Yarn v1 Windows (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Waiting to run
E2E Tests / E2E — npm (push) Waiting to run
E2E Tests / E2E — pnpm (push) Waiting to run
Co-authored-by: sebastien <lorber.sebastien@gmail.com>
2025-11-27 11:19:53 +01:00
dependabot[bot] d379344e6a
chore(deps): bump node-forge from 1.3.1 to 1.3.2 (#11574)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-27 10:46:49 +01:00
dependabot[bot] 5c7ba4e9d6
chore(deps): bump actions/dependency-review-action from 4.8.1 to 4.8.2 (#11557)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
Canary Release / Publish Canary (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-25 14:23:03 +01:00
dependabot[bot] a72be12acc
chore(deps): bump actions/checkout from 5.0.0 to 6.0.0 (#11569)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-25 13:46:46 +01:00
Sébastien Lorber d6cbf6f9e8
fix(theme): Fix code block text selection copy on Firefox? (#11565)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
Canary Release / Publish Canary (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20.0) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (22) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (24) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (25.1) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 Windows (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Has been cancelled
E2E Tests / E2E — npm (push) Has been cancelled
E2E Tests / E2E — pnpm (push) Has been cancelled
2025-11-21 19:19:55 +01:00
Pyry Takala f13adecec0
fix(theme-search-algolia): preserve query strings in useSearchResultUrlProcessor (#11560)
Some checks are pending
Argos CI / take-screenshots (push) Waiting to run
Build Hash Router / Build Hash Router (push) Waiting to run
Canary Release / Publish Canary (push) Waiting to run
CodeQL / Analyze (javascript) (push) Waiting to run
Continuous Releases / Continuous Releases (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20.0) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (22) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (24) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (25.1) (push) Waiting to run
E2E Tests / E2E — Yarn v1 Windows (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Waiting to run
E2E Tests / E2E — npm (push) Waiting to run
E2E Tests / E2E — pnpm (push) Waiting to run
2025-11-21 11:43:38 +01:00
Natan Yagudayev 89633b4d33
feat(theme-search-algolia): add support for DocSearch v4.3.2 and new Suggested Questions (#11541)
Co-authored-by: sebastien <lorber.sebastien@gmail.com>
2025-11-21 11:31:51 +01:00
Kohei Watanabe bbec801e3f
fix(mdx-loader): fix url.parse deprecation warning on Node 24+ (#11530)
Some checks are pending
Argos CI / take-screenshots (push) Waiting to run
Build Hash Router / Build Hash Router (push) Waiting to run
Canary Release / Publish Canary (push) Waiting to run
CodeQL / Analyze (javascript) (push) Waiting to run
Continuous Releases / Continuous Releases (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20.0) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (22) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (24) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (25.1) (push) Waiting to run
E2E Tests / E2E — Yarn v1 Windows (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Waiting to run
E2E Tests / E2E — npm (push) Waiting to run
E2E Tests / E2E — pnpm (push) Waiting to run
Co-authored-by: sebastien <lorber.sebastien@gmail.com>
2025-11-20 21:38:15 +01:00
Sébastien Lorber 366b4a1b26
test(blog): Add basic tests for blog routes. (#11564) 2025-11-20 16:04:06 +01:00
Sébastien Lorber 66dbc7da39 chore(docs, blog, pages): refactor/normalize plugin option id types for all content plugins (#11563)
Some checks are pending
Argos CI / take-screenshots (push) Waiting to run
Build Hash Router / Build Hash Router (push) Waiting to run
Canary Release / Publish Canary (push) Waiting to run
CodeQL / Analyze (javascript) (push) Waiting to run
Continuous Releases / Continuous Releases (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (20.0) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (22) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (24) (push) Waiting to run
E2E Tests / E2E — Yarn v1 (25.1) (push) Waiting to run
E2E Tests / E2E — Yarn v1 Windows (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Waiting to run
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Waiting to run
E2E Tests / E2E — npm (push) Waiting to run
E2E Tests / E2E — pnpm (push) Waiting to run
2025-11-20 13:27:29 +01:00
Sébastien Lorber 37530aaafb
chore(blog): refactor blog Content, remove useless `blogListPaginated` attribute (#11562) 2025-11-20 12:52:29 +01:00
Pyry Takala 7880f26a07
fix(content-blog): filter unlisted posts from author pages (#11559) 2025-11-20 11:20:02 +01:00
Max Clayton Clowes b61745a9e2
docs: resource add plugins glossary & cookie-consentl; fix formatting… (#11555) 2025-11-20 11:18:37 +01:00
Sébastien Lorber 05acc90c01
chore(ci): Improve Netlify cache + Run `git backfill` in parallel (#11554)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
2025-11-17 11:46:23 +01:00
Sébastien Lorber 963159b3c1
chore(ci): upgrade Netlify to Node 24 (LTS) + add `git backfill` command (#11553) 2025-11-17 09:50:11 +01:00
Sébastien Lorber acc66c14b0
feat(core): New siteConfig `future.experimental_vcs` API + `future.experimental_faster.gitEagerVcs` flag (#11512)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
Canary Release / Publish Canary (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20.0) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (22) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (24) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (25.1) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 Windows (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Has been cancelled
E2E Tests / E2E — npm (push) Has been cancelled
E2E Tests / E2E — pnpm (push) Has been cancelled
Co-authored-by: slorber <749374+slorber@users.noreply.github.com>
2025-11-14 18:15:45 +01:00
dependabot[bot] a24b8ad5ed
chore(deps): bump js-yaml from 4.1.0 to 4.1.1 (#11551)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-14 17:23:54 +01:00
Sébastien Lorber 9c85f8689a
fix(core): optimize i18n integration for site builds + improve inference of locale config (#11550) 2025-11-14 13:13:05 +01:00
Luiz Carlos 6a38ccdfb0
fix(translations): complete theme translations for Algolia pt-br (#11533)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
Canary Release / Publish Canary (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20.0) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (22) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (24) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (25) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 Windows (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Has been cancelled
E2E Tests / E2E — npm (push) Has been cancelled
E2E Tests / E2E — pnpm (push) Has been cancelled
2025-11-06 14:37:29 +01:00
Bhoomi Sharma c81409b5a3
docs: Remove redundant "as well" in README introduction (#11525)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
2025-10-31 18:11:11 +01:00
dependabot[bot] f8bedbd0a0
chore(deps): bump actions/upload-artifact from 4 to 5 (#11514)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-28 16:59:28 +01:00
dependabot[bot] 7651d42e11
chore(deps): bump github/codeql-action from 4.30.9 to 4.31.0 (#11515)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-28 16:58:48 +01:00
dependabot[bot] a4742594a9
chore(deps): bump github/codeql-action from 4.30.8 to 4.30.9 (#11504)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
Canary Release / Publish Canary (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-21 17:41:10 +02:00
dependabot[bot] 74542245b3
chore(deps): bump actions/setup-node from 5.0.0 to 6.0.0 (#11503)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-21 17:41:00 +02:00
Anukool Pandey 6b3ed1ee65
fix(website): Correct the site homepage overflows (CTA buttons + new version heading) (#11500)
Some checks are pending
Argos CI / take-screenshots (push) Waiting to run
Build Hash Router / Build Hash Router (push) Waiting to run
CodeQL / Analyze (javascript) (push) Waiting to run
Continuous Releases / Continuous Releases (push) Waiting to run
Co-authored-by: sebastien <lorber.sebastien@gmail.com>
2025-10-20 13:15:59 +02:00
Sébastien Lorber 0372ecd1e9
fix(faster): fix server build SWC / browserslist node target (#11496)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
Canary Release / Publish Canary (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20.0) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (22) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (24) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (25) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 Windows (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Has been cancelled
E2E Tests / E2E — npm (push) Has been cancelled
E2E Tests / E2E — pnpm (push) Has been cancelled
2025-10-17 21:02:45 +02:00
Sébastien Lorber e133e8d6d2
chore(ci): add nodejs 25 to CI jobs matrix (#11495) 2025-10-17 18:08:45 +02:00
Sébastien Lorber 5e846f6496
chore: update examples for v3.9.2 (#11494) 2025-10-17 17:38:38 +02:00
Sébastien Lorber bca9ce746f
chore: release v3.9.2 (#11491) 2025-10-17 17:15:09 +02:00
132 changed files with 5139 additions and 2108 deletions

View File

@ -27,10 +27,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repository code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Use Node.js
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn

View File

@ -22,9 +22,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Node
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn
@ -32,3 +32,5 @@ jobs:
run: yarn || yarn || yarn
- name: Build blog-only
run: yarn workspace website build:blogOnly
env:
DOCUSAURUS_PERF_LOGGER: 'true'

View File

@ -25,9 +25,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Node
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn
@ -37,26 +37,27 @@ jobs:
- name: Build Hash Router
run: yarn build:website:fast
env:
DOCUSAURUS_PERF_LOGGER: 'true'
DOCUSAURUS_ROUTER: 'hash'
# Note: hash router + baseUrl do not play well together
# This would host at https://facebook.github.io/docusaurus/#/docusaurus/
# BASE_URL: '/docusaurus/' # hash router +
- name: Upload Website artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: website-hash-router-archive
path: website/build
#- name: Upload Website Pages artifact
# uses: actions/upload-pages-artifact@v3
# uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4.0.0
# with:
# path: website/build
# Deploy to https://facebook.github.io/docusaurus/
- name: Deploy to GitHub Pages
if: ${{ github.event_name != 'pull_request' && github.ref_name == 'main' }}
uses: peaceiris/actions-gh-pages@v4
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: website/build
@ -80,4 +81,4 @@ jobs:
# steps:
# - name: Deploy to GitHub Pages
# id: deployment
# uses: actions/deploy-pages@v4
# uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4.0.5

View File

@ -41,14 +41,14 @@ jobs:
DOCUSAURUS_INFRA: ['SLOWER', 'FASTER']
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Node
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn
- name: Track build size changes
uses: preactjs/compressed-size-action@946a292cd35bd1088e0d7eb92b69d1a8d5b5d76a # v2
uses: preactjs/compressed-size-action@8518045ed95e94e971b83333085e1cb99aa18aa8 # v2.9.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
build-script: build:website:fast
@ -62,6 +62,7 @@ jobs:
comment-key: DOCUSAURUS_INFRA_${{ matrix.DOCUSAURUS_INFRA }}
env:
DOCUSAURUS_SLOWER: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 'true' || 'false' }}
DOCUSAURUS_PERF_LOGGER: 'true'
# Ensures build times stay under reasonable thresholds
build-time:
@ -73,9 +74,9 @@ jobs:
DOCUSAURUS_INFRA: ['SLOWER', 'FASTER']
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Node
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn
@ -88,6 +89,7 @@ jobs:
timeout-minutes: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 3 || 2 }}
env:
DOCUSAURUS_SLOWER: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 'true' || 'false' }}
DOCUSAURUS_PERF_LOGGER: 'true'
# Ensure build with a warm cache does not increase too much
- name: Build (warm cache)
@ -96,5 +98,6 @@ jobs:
timeout-minutes: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 1 || 2 }}
env:
DOCUSAURUS_SLOWER: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 'true' || 'false' }}
DOCUSAURUS_PERF_LOGGER: 'true'
# TODO post a GitHub comment with build with perf warnings?

View File

@ -20,11 +20,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 0 # Needed to get the commit number with "git rev-list --count HEAD"
- name: Set up Node
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn

View File

@ -33,12 +33,12 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Initialize CodeQL
uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # 4.30.8
uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # 4.31.0
with:
languages: ${{ matrix.language }}
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # 4.30.8
uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # 4.31.0

View File

@ -18,10 +18,10 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Node
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn

View File

@ -13,6 +13,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Dependency Review
uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # 4.8.1
uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # 4.8.2

View File

@ -21,10 +21,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Use Node.js
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn

View File

@ -19,7 +19,7 @@ jobs:
contents: write
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
ref: ${{ github.head_ref }}
@ -42,6 +42,6 @@ jobs:
- name: Print Diff
run: git diff
- uses: stefanzweifel/git-auto-commit-action@v7
- uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
with:
commit_message: 'refactor: apply lint autofix'

View File

@ -20,9 +20,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Node
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn

View File

@ -22,9 +22,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Node
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn

View File

@ -38,12 +38,12 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
node: ['20.0', '20', '22', '24']
node: ['20.0', '20', '22', '24', '25.1']
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: ${{ matrix.node }}
cache: yarn
@ -78,9 +78,9 @@ jobs:
runs-on: windows-8-core
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Use Node.js LTS
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn
@ -109,7 +109,7 @@ jobs:
DOCUSAURUS_PERF_LOGGER: 'true'
working-directory: test-website-in-workspace
- name: Upload Website artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: website-e2e-windows
path: test-website-in-workspace/build
@ -124,9 +124,9 @@ jobs:
variant: [-s, -st]
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Use Node.js LTS
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn
@ -193,9 +193,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Use Node.js LTS
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn
@ -233,9 +233,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Use Node.js LTS
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn

View File

@ -26,9 +26,9 @@ jobs:
variant: ['js', 'ts']
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Node LTS
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: lts/*
cache: yarn

View File

@ -27,14 +27,14 @@ jobs:
runs-on: windows-latest
strategy:
matrix:
node: ['20.0', '20', '22', '24']
node: ['20.0', '20', '22', '24', '25.1']
steps:
- name: Support longpaths
run: git config --system core.longpaths true
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: ${{ matrix.node }}
cache: yarn
@ -54,6 +54,8 @@ jobs:
run: yarn workspace website test:swizzle:wrap:ts
- name: Docusaurus Build
run: yarn build:website:fast
env:
DOCUSAURUS_PERF_LOGGER: 'true'
- name: TypeCheck website
# see https://github.com/facebook/docusaurus/pull/10486

View File

@ -27,12 +27,12 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
node: ['20.0', '20', '22', '24']
node: ['20.0', '20', '22', '24', '25.1']
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: ${{ matrix.node }}
cache: yarn

View File

@ -1,7 +1,7 @@
dist
node_modules
.yarn
build
**/build/**
coverage
.docusaurus
.idea
@ -11,6 +11,8 @@ coverage
jest/vendor
argos/test-results
packages/lqip-loader/lib/
packages/docusaurus/lib/
packages/docusaurus-*/lib/*

View File

@ -45,7 +45,7 @@ Short on time? Check out our [5-minute tutorial ⏱️](https://tutorial.docusau
- **Customizable**
> While Docusaurus ships with the key pages and sections you need to get started, including a home page, a docs section, a [blog](https://docusaurus.io/docs/blog), and additional support pages, it is also [customizable](https://docusaurus.io/docs/creating-pages) as well to ensure you have a site that is [uniquely yours](https://docusaurus.io/docs/styling-layout).
> While Docusaurus ships with the key pages and sections you need to get started, including a home page, a docs section, a [blog](https://docusaurus.io/docs/blog), and additional support pages, it is also [customizable](https://docusaurus.io/docs/creating-pages) to ensure you have a site that is [uniquely yours](https://docusaurus.io/docs/styling-layout).
## Installation

View File

@ -16,8 +16,8 @@
"dev": "docusaurus start"
},
"dependencies": {
"@docusaurus/core": "3.9.0",
"@docusaurus/preset-classic": "3.9.0",
"@docusaurus/core": "3.9.2",
"@docusaurus/preset-classic": "3.9.2",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@ -25,9 +25,9 @@
"react-dom": "^19.0.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.9.0",
"@docusaurus/tsconfig": "3.9.0",
"@docusaurus/types": "3.9.0",
"@docusaurus/module-type-aliases": "3.9.2",
"@docusaurus/tsconfig": "3.9.2",
"@docusaurus/types": "3.9.2",
"typescript": "~5.6.2"
},
"browserslist": {

File diff suppressed because it is too large Load Diff

View File

@ -15,8 +15,8 @@
"dev": "docusaurus start"
},
"dependencies": {
"@docusaurus/core": "3.9.0",
"@docusaurus/preset-classic": "3.9.0",
"@docusaurus/core": "3.9.2",
"@docusaurus/preset-classic": "3.9.2",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@ -24,8 +24,8 @@
"react-dom": "^19.0.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.9.0",
"@docusaurus/types": "3.9.0"
"@docusaurus/module-type-aliases": "3.9.2",
"@docusaurus/types": "3.9.2"
},
"browserslist": {
"production": [

File diff suppressed because it is too large Load Diff

5
jest/deps.d.ts vendored
View File

@ -12,8 +12,3 @@ declare module 'to-vfile' {
export function read(path: string, encoding?: string): Promise<VFile>;
}
declare module '@testing-utils/git' {
const createTempRepo: typeof import('./utils/git').createTempRepo;
export {createTempRepo};
}

View File

@ -82,7 +82,7 @@ function normalizePaths<T>(value: T): T {
(val) => val.split(cwdReal).join('<PROJECT_ROOT>'),
(val) => val.split(cwd).join('<PROJECT_ROOT>'),
// Replace home directory with <TEMP_DIR>
// Replace temp directory with <TEMP_DIR>
(val) => val.split(tempDirReal).join('<TEMP_DIR>'),
(val) => val.split(tempDir).join('<TEMP_DIR>'),

63
jest/utils/git.ts vendored
View File

@ -1,63 +0,0 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import fs from 'fs-extra';
import os from 'os';
import path from 'path';
import shell from 'shelljs';
class Git {
constructor(private dir: string) {
const res = shell.exec('git init', {cwd: dir, silent: true});
if (res.code !== 0) {
throw new Error(`git init exited with code ${res.code}.
stderr: ${res.stderr}
stdout: ${res.stdout}`);
}
// Doesn't matter currently
shell.exec('git config user.email "test@jc-verse.com"', {
cwd: dir,
silent: true,
});
shell.exec('git config user.name "Test"', {cwd: dir, silent: true});
shell.exec('git commit --allow-empty -m "First commit"', {
cwd: dir,
silent: true,
});
}
commit(msg: string, date: string, author: string): void {
const addRes = shell.exec('git add .', {cwd: this.dir, silent: true});
const commitRes = shell.exec(
`git commit -m "${msg}" --date "${date}T00:00:00Z" --author "${author}"`,
{
cwd: this.dir,
env: {GIT_COMMITTER_DATE: `${date}T00:00:00Z`},
silent: true,
},
);
if (addRes.code !== 0) {
throw new Error(`git add exited with code ${addRes.code}.
stderr: ${addRes.stderr}
stdout: ${addRes.stdout}`);
}
if (commitRes.code !== 0) {
throw new Error(`git commit exited with code ${commitRes.code}.
stderr: ${commitRes.stderr}
stdout: ${commitRes.stdout}`);
}
}
}
// This function is sync so the same mock repo can be shared across tests
export function createTempRepo(): {repoDir: string; git: Git} {
const repoDir = fs.mkdtempSync(path.join(os.tmpdir(), 'git-test-repo'));
const git = new Git(repoDir);
return {repoDir, git};
}

View File

@ -25,7 +25,7 @@
"@docusaurus/logger": "3.9.2",
"@docusaurus/utils": "3.9.2",
"commander": "^5.1.0",
"execa": "5.1.1",
"execa": "^5.1.1",
"fs-extra": "^11.1.1",
"lodash": "^4.17.21",
"prompts": "^2.4.2",

View File

@ -273,7 +273,10 @@ async function getSiteName(
return 'A website name is required.';
}
const dest = path.resolve(rootDir, siteName);
if (await fs.pathExists(dest)) {
if (siteName === '.' && (await fs.readdir(dest)).length > 0) {
return logger.interpolate`Directory not empty at path=${dest}!`;
}
if (siteName !== '.' && (await fs.pathExists(dest))) {
return logger.interpolate`Directory already exists at path=${dest}!`;
}
return true;

View File

@ -36,7 +36,6 @@
"@babel/preset-react": "^7.25.9",
"@babel/preset-typescript": "^7.25.9",
"@babel/runtime": "^7.25.9",
"@babel/runtime-corejs3": "^7.25.9",
"@babel/traverse": "^7.25.9",
"@docusaurus/logger": "3.9.2",
"@docusaurus/utils": "3.9.2",

View File

@ -28,7 +28,7 @@ async function createSwcJsLoaderFactory(): Promise<
return ({isServer}) => {
return {
loader,
options: getOptions({isServer}),
options: getOptions({isServer, bundlerName: 'webpack'}),
};
};
}
@ -42,7 +42,7 @@ async function createRspackSwcJsLoaderFactory(): Promise<
return ({isServer}) => {
return {
loader,
options: getOptions({isServer}),
options: getOptions({isServer, bundlerName: 'rspack'}),
};
};
}

View File

@ -142,7 +142,10 @@ async function getRspackMinimizers({
}: MinimizersConfig): Promise<WebpackPluginInstance[]> {
const rspack = getCurrentBundlerAsRspack({currentBundler});
const getBrowserslistQueries = await importGetBrowserslistQueries();
const browserslistQueries = getBrowserslistQueries({isServer: false});
const browserslistQueries = getBrowserslistQueries({
isServer: false,
bundlerName: 'rspack',
});
const swcJsMinimizerOptions = await importSwcJsMinimizerOptions();
return [
// See https://rspack.dev/plugins/rspack/swc-js-minimizer-rspack-plugin

View File

@ -24,6 +24,7 @@
"@swc/html": "^1.13.5",
"browserslist": "^4.24.2",
"lightningcss": "^1.27.0",
"semver": "^7.5.4",
"swc-loader": "^0.2.6",
"tslib": "^2.6.0",
"webpack": "^5.95.0"

View File

@ -9,18 +9,22 @@ import Rspack from '@rspack/core';
import * as lightningcss from 'lightningcss';
import browserslist from 'browserslist';
import {minify as swcHtmlMinifier} from '@swc/html';
import semver from 'semver';
import type {JsMinifyOptions, Options as SwcOptions} from '@swc/core';
import type {CurrentBundler} from '@docusaurus/types';
export const swcLoader = require.resolve('swc-loader');
export const getSwcLoaderOptions = ({
isServer,
bundlerName,
}: {
isServer: boolean;
bundlerName: CurrentBundler['name'];
}): SwcOptions => {
return {
env: {
targets: getBrowserslistQueries({isServer}),
targets: getBrowserslistQueries({isServer, bundlerName}),
},
jsc: {
parser: {
@ -63,20 +67,53 @@ export function getSwcJsMinimizerOptions(): JsMinifyOptions {
};
}
// TODO this is not accurate
// for Rspack we should read from the built-in browserslist data
// see https://github.com/facebook/docusaurus/pull/11496
function getLastBrowserslistKnownNodeVersion(
bundlerName: CurrentBundler['name'],
): string {
if (bundlerName === 'rspack') {
// TODO hardcoded value until Rspack exposes its Browserslist data
// see https://github.com/facebook/docusaurus/pull/11496
return '22.0.0';
}
// browserslist('last 1 node versions')[0]!.replace('node ', '')
return browserslist.nodeVersions.at(-1)!;
}
function getMinVersion(v1: string, v2: string): string {
return semver.lt(v1, v2) ? v1 : v2;
}
// We need this because of Rspack built-in LightningCSS integration
// See https://github.com/orgs/browserslist/discussions/846
export function getBrowserslistQueries({
isServer,
bundlerName,
}: {
isServer: boolean;
bundlerName: CurrentBundler['name'];
}): string[] {
if (isServer) {
return [`node ${process.versions.node}`];
// Escape hatch env variable
if (process.env.DOCUSAURUS_SERVER_NODE_TARGET) {
return [`node ${process.env.DOCUSAURUS_SERVER_NODE_TARGET}`];
}
// For server builds, we want to use the current Node version as target
// But we can't pass a target that Browserslist doesn't know about yet
const nodeTarget = getMinVersion(
process.versions.node,
getLastBrowserslistKnownNodeVersion(bundlerName),
);
return [`node ${nodeTarget}`];
}
const queries = browserslist.loadConfig({path: process.cwd()}) ?? [
...browserslist.defaults,
];
return queries;
}

View File

@ -6,7 +6,6 @@
*/
import path from 'path';
import url from 'url';
import fs from 'fs-extra';
import {
toMessageRelativeFilePath,
@ -15,6 +14,7 @@ import {
findAsyncSequential,
getFileLoaderUtils,
parseURLOrPath,
parseLocalURLPath,
} from '@docusaurus/utils';
import escapeHtml from 'escape-html';
import {imageSizeFromFile} from 'image-size/fromFile';
@ -207,11 +207,11 @@ async function processImageNode(target: Target, context: Context) {
return;
}
const parsedUrl = url.parse(node.url);
if (parsedUrl.protocol || !parsedUrl.pathname) {
// pathname:// is an escape hatch, in case user does not want her images to
const localUrlPath = parseLocalURLPath(node.url);
if (!localUrlPath) {
// pathname:// is an escape hatch, in case the user does not want images to
// be converted to require calls going through webpack loader
if (parsedUrl.protocol === 'pathname:') {
if (parseURLOrPath(node.url).protocol === 'pathname:') {
node.url = node.url.replace('pathname://', '');
}
return;
@ -220,7 +220,7 @@ async function processImageNode(target: Target, context: Context) {
// We decode it first because Node Url.pathname is always encoded
// while the image file-system path are not.
// See https://github.com/facebook/docusaurus/discussions/10720
const decodedPathname = decodeURIComponent(parsedUrl.pathname);
const decodedPathname = decodeURIComponent(localUrlPath.pathname);
// We try to convert image urls without protocol to images with require calls
// going through webpack ensures that image assets exist at build time

View File

@ -6,7 +6,6 @@
*/
import path from 'path';
import url from 'url';
import fs from 'fs-extra';
import {
toMessageRelativeFilePath,
@ -15,6 +14,7 @@ import {
findAsyncSequential,
getFileLoaderUtils,
parseURLOrPath,
parseLocalURLPath,
} from '@docusaurus/utils';
import escapeHtml from 'escape-html';
import logger from '@docusaurus/logger';
@ -209,21 +209,22 @@ async function processLinkNode(target: Target, context: Context) {
return;
}
const parsedUrl = url.parse(node.url);
if (parsedUrl.protocol || !parsedUrl.pathname) {
const localUrlPath = parseLocalURLPath(node.url);
if (!localUrlPath) {
// Don't process pathname:// here, it's used by the <Link> component
return;
}
const hasSiteAlias = parsedUrl.pathname.startsWith('@site/');
const hasSiteAlias = localUrlPath.pathname.startsWith('@site/');
const hasAssetLikeExtension =
path.extname(parsedUrl.pathname) &&
!parsedUrl.pathname.match(/\.(?:mdx?|html)(?:#|$)/);
path.extname(localUrlPath.pathname) &&
!localUrlPath.pathname.match(/\.(?:mdx?|html)(?:#|$)/);
if (!hasSiteAlias && !hasAssetLikeExtension) {
return;
}
const localFilePath = await getLocalFileAbsolutePath(
decodeURIComponent(parsedUrl.pathname),
decodeURIComponent(localUrlPath.pathname),
context,
);

View File

@ -0,0 +1,637 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`buildAllRoutes works for realistic blog post 2`] = `
[
{
"component": "@theme/BlogPostPage",
"context": {
"blogMetadata": "@aliased/data/blogMetadata-default.json",
},
"exact": true,
"metadata": {
"lastUpdatedAt": undefined,
"sourceFilePath": "blog/post1.md",
},
"modules": {
"content": "@site/blog/post1.md",
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/post1",
},
{
"component": "@theme/BlogPostPage",
"context": {
"blogMetadata": "@aliased/data/blogMetadata-default.json",
},
"exact": true,
"metadata": {
"lastUpdatedAt": undefined,
"sourceFilePath": "blog/post2.md",
},
"modules": {
"content": "@site/blog/post2.md",
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/post2",
},
{
"component": "@theme/BlogPostPage",
"context": {
"blogMetadata": "@aliased/data/blogMetadata-default.json",
},
"exact": true,
"metadata": {
"lastUpdatedAt": undefined,
"sourceFilePath": "blog/post3.md",
},
"modules": {
"content": "@site/blog/post3.md",
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/post3",
},
{
"component": "@theme/BlogPostPage",
"context": {
"blogMetadata": "@aliased/data/blogMetadata-default.json",
},
"exact": true,
"metadata": {
"lastUpdatedAt": undefined,
"sourceFilePath": "blog/post4.md",
},
"modules": {
"content": "@site/blog/post4.md",
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/post4",
},
{
"component": "@theme/BlogPostPage",
"context": {
"blogMetadata": "@aliased/data/blogMetadata-default.json",
},
"exact": true,
"metadata": {
"lastUpdatedAt": undefined,
"sourceFilePath": "blog/post5.md",
},
"modules": {
"content": "@site/blog/post5.md",
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/post5",
},
{
"component": "@theme/BlogPostPage",
"context": {
"blogMetadata": "@aliased/data/blogMetadata-default.json",
},
"exact": true,
"metadata": {
"lastUpdatedAt": undefined,
"sourceFilePath": "blog/post6.md",
},
"modules": {
"content": "@site/blog/post6.md",
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/post6",
},
{
"component": "@theme/BlogListPage",
"exact": true,
"modules": {
"items": [
{
"content": {
"__import": true,
"path": "@site/blog/post1.md",
"query": {
"truncated": true,
},
},
},
{
"content": {
"__import": true,
"path": "@site/blog/post2.md",
"query": {
"truncated": true,
},
},
},
],
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog",
"props": {
"metadata": {
"blogDescription": "Custom blog description",
"blogTitle": "Custom blog title",
"nextPage": "/blog/page/2",
"page": 1,
"permalink": "/blog",
"postsPerPage": 2,
"previousPage": undefined,
"totalCount": 5,
"totalPages": 3,
},
},
},
{
"component": "@theme/BlogListPage",
"exact": true,
"modules": {
"items": [
{
"content": {
"__import": true,
"path": "@site/blog/post4.md",
"query": {
"truncated": true,
},
},
},
{
"content": {
"__import": true,
"path": "@site/blog/post5.md",
"query": {
"truncated": true,
},
},
},
],
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/page/2",
"props": {
"metadata": {
"blogDescription": "Custom blog description",
"blogTitle": "Custom blog title",
"nextPage": "/blog/page/3",
"page": 2,
"permalink": "/blog/page/2",
"postsPerPage": 2,
"previousPage": "/blog",
"totalCount": 5,
"totalPages": 3,
},
},
},
{
"component": "@theme/BlogListPage",
"exact": true,
"modules": {
"items": [
{
"content": {
"__import": true,
"path": "@site/blog/post6.md",
"query": {
"truncated": true,
},
},
},
],
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/page/3",
"props": {
"metadata": {
"blogDescription": "Custom blog description",
"blogTitle": "Custom blog title",
"nextPage": undefined,
"page": 3,
"permalink": "/blog/page/3",
"postsPerPage": 2,
"previousPage": "/blog/page/2",
"totalCount": 5,
"totalPages": 3,
},
},
},
{
"component": "@theme/BlogArchivePage",
"exact": true,
"path": "/blog/archive",
"props": {
"archive": {
"blogPosts": [
{
"content": "Content for post1",
"id": "post1",
"metadata": {
"authors": [
{
"key": "author1",
},
],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for post1",
"frontMatter": {},
"permalink": "/blog/post1",
"readingTime": 2,
"source": "@site/blog/post1.md",
"tags": [],
"title": "Title for post1",
},
},
{
"content": "Content for post2",
"id": "post2",
"metadata": {
"authors": [
{
"key": "author1",
},
],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for post2",
"frontMatter": {},
"permalink": "/blog/post2",
"readingTime": 2,
"source": "@site/blog/post2.md",
"tags": [],
"title": "Title for post2",
},
},
{
"content": "Content for post4",
"id": "post4",
"metadata": {
"authors": [
{
"key": "author1",
},
{
"key": "author2",
},
],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for post4",
"frontMatter": {},
"permalink": "/blog/post4",
"readingTime": 2,
"source": "@site/blog/post4.md",
"tags": [],
"title": "Title for post4",
},
},
{
"content": "Content for post5",
"id": "post5",
"metadata": {
"authors": [
{
"key": "author2",
},
{
"key": "author3",
},
],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for post5",
"frontMatter": {},
"permalink": "/blog/post5",
"readingTime": 2,
"source": "@site/blog/post5.md",
"tags": [],
"title": "Title for post5",
},
},
{
"content": "Content for post6",
"id": "post6",
"metadata": {
"authors": [],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for post6",
"frontMatter": {},
"permalink": "/blog/post6",
"readingTime": 2,
"source": "@site/blog/post6.md",
"tags": [],
"title": "Title for post6",
},
},
],
},
},
},
{
"component": "@theme/Blog/Pages/BlogAuthorsListPage",
"context": {
"blogMetadata": "@aliased/data/blogMetadata-default.json",
},
"exact": true,
"modules": {
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/authors",
"props": {
"authors": [
{
"count": 3,
"key": "author1",
"name": "Author 1",
"page": {
"permalink": "/blog/authors/author1",
},
},
{
"count": 2,
"key": "author2",
"name": "Author 2",
"page": null,
},
{
"count": 1,
"key": "author3",
"name": "Author 3",
"page": {
"permalink": "/blog/authors/author3",
},
},
],
},
},
{
"component": "@theme/Blog/Pages/BlogAuthorsPostsPage",
"context": {
"blogMetadata": "@aliased/data/blogMetadata-default.json",
},
"exact": true,
"modules": {
"items": [
{
"content": {
"__import": true,
"path": "@site/blog/post1.md",
"query": {
"truncated": true,
},
},
},
{
"content": {
"__import": true,
"path": "@site/blog/post2.md",
"query": {
"truncated": true,
},
},
},
],
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/authors/author1",
"props": {
"author": {
"count": 3,
"key": "author1",
"name": "Author 1",
"page": {
"permalink": "/blog/authors/author1",
},
},
"listMetadata": {
"blogDescription": "Custom blog description",
"blogTitle": "Custom blog title",
"nextPage": "/blog/authors/author1/page/2",
"page": 1,
"permalink": "/blog/authors/author1",
"postsPerPage": 2,
"previousPage": undefined,
"totalCount": 3,
"totalPages": 2,
},
},
},
{
"component": "@theme/Blog/Pages/BlogAuthorsPostsPage",
"context": {
"blogMetadata": "@aliased/data/blogMetadata-default.json",
},
"exact": true,
"modules": {
"items": [
{
"content": {
"__import": true,
"path": "@site/blog/post4.md",
"query": {
"truncated": true,
},
},
},
],
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/authors/author1/page/2",
"props": {
"author": {
"count": 3,
"key": "author1",
"name": "Author 1",
"page": {
"permalink": "/blog/authors/author1",
},
},
"listMetadata": {
"blogDescription": "Custom blog description",
"blogTitle": "Custom blog title",
"nextPage": undefined,
"page": 2,
"permalink": "/blog/authors/author1/page/2",
"postsPerPage": 2,
"previousPage": "/blog/authors/author1",
"totalCount": 3,
"totalPages": 2,
},
},
},
{
"component": "@theme/Blog/Pages/BlogAuthorsPostsPage",
"context": {
"blogMetadata": "@aliased/data/blogMetadata-default.json",
},
"exact": true,
"modules": {
"items": [
{
"content": {
"__import": true,
"path": "@site/blog/post5.md",
"query": {
"truncated": true,
},
},
},
],
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/authors/author3",
"props": {
"author": {
"count": 1,
"key": "author3",
"name": "Author 3",
"page": {
"permalink": "/blog/authors/author3",
},
},
"listMetadata": {
"blogDescription": "Custom blog description",
"blogTitle": "Custom blog title",
"nextPage": undefined,
"page": 1,
"permalink": "/blog/authors/author3",
"postsPerPage": 2,
"previousPage": undefined,
"totalCount": 1,
"totalPages": 1,
},
},
},
]
`;
exports[`buildAllRoutes works for realistic blog post 3`] = `
{
"blog-post-list-prop-default.json": {
"items": [
{
"date": 2020-01-01T00:00:00.000Z,
"permalink": "/blog/post1",
"title": "Title for post1",
"unlisted": undefined,
},
{
"date": 2020-01-01T00:00:00.000Z,
"permalink": "/blog/post2",
"title": "Title for post2",
"unlisted": undefined,
},
{
"date": 2020-01-01T00:00:00.000Z,
"permalink": "/blog/post3",
"title": "Title for post3",
"unlisted": true,
},
{
"date": 2020-01-01T00:00:00.000Z,
"permalink": "/blog/post4",
"title": "Title for post4",
"unlisted": undefined,
},
{
"date": 2020-01-01T00:00:00.000Z,
"permalink": "/blog/post5",
"title": "Title for post5",
"unlisted": undefined,
},
],
"title": "Custom blog sidebar title",
},
"blogMetadata-default.json": {
"authorsListPath": "/blog/authors",
"blogBasePath": "/blog",
"blogTitle": "Custom blog title",
},
"site-blog-post-1-md-235.json": {
"authors": [
{
"key": "author1",
},
],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for post1",
"frontMatter": {},
"permalink": "/blog/post1",
"readingTime": 2,
"source": "@site/blog/post1.md",
"tags": [],
"title": "Title for post1",
},
"site-blog-post-2-md-b42.json": {
"authors": [
{
"key": "author1",
},
],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for post2",
"frontMatter": {},
"permalink": "/blog/post2",
"readingTime": 2,
"source": "@site/blog/post2.md",
"tags": [],
"title": "Title for post2",
},
"site-blog-post-3-md-3b7.json": {
"authors": [
{
"key": "author3",
},
],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for post3",
"frontMatter": {},
"permalink": "/blog/post3",
"readingTime": 2,
"source": "@site/blog/post3.md",
"tags": [],
"title": "Title for post3",
"unlisted": true,
},
"site-blog-post-4-md-15a.json": {
"authors": [
{
"key": "author1",
},
{
"key": "author2",
},
],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for post4",
"frontMatter": {},
"permalink": "/blog/post4",
"readingTime": 2,
"source": "@site/blog/post4.md",
"tags": [],
"title": "Title for post4",
},
"site-blog-post-5-md-274.json": {
"authors": [
{
"key": "author2",
},
{
"key": "author3",
},
],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for post5",
"frontMatter": {},
"permalink": "/blog/post5",
"readingTime": 2,
"source": "@site/blog/post5.md",
"tags": [],
"title": "Title for post5",
},
"site-blog-post-6-md-3ca.json": {
"authors": [],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for post6",
"frontMatter": {},
"permalink": "/blog/post6",
"readingTime": 2,
"source": "@site/blog/post6.md",
"tags": [],
"title": "Title for post6",
},
}
`;

View File

@ -24,24 +24,7 @@ exports[`getContentTranslationFiles returns translation files matching snapshot
exports[`translateContent falls back when translation is incomplete 1`] = `
{
"blogListPaginated": [
{
"items": [
"hello",
],
"metadata": {
"blogDescription": "Someone's random blog",
"blogTitle": "My blog",
"nextPage": undefined,
"page": 1,
"permalink": "/",
"postsPerPage": 10,
"previousPage": undefined,
"totalCount": 1,
"totalPages": 1,
},
},
],
"blogDescription": "Someone's random blog",
"blogPosts": [
{
"content": "",
@ -63,29 +46,13 @@ exports[`translateContent falls back when translation is incomplete 1`] = `
"blogSidebarTitle": "All my posts",
"blogTags": {},
"blogTagsListPath": "/tags",
"blogTitle": "My blog",
}
`;
exports[`translateContent returns translated loaded 1`] = `
{
"blogListPaginated": [
{
"items": [
"hello",
],
"metadata": {
"blogDescription": "Someone's random blog (translated)",
"blogTitle": "My blog (translated)",
"nextPage": undefined,
"page": 1,
"permalink": "/",
"postsPerPage": 10,
"previousPage": undefined,
"totalCount": 1,
"totalPages": 1,
},
},
],
"blogDescription": "Someone's random blog (translated)",
"blogPosts": [
{
"content": "",
@ -107,5 +74,6 @@ exports[`translateContent returns translated loaded 1`] = `
"blogSidebarTitle": "All my posts (translated)",
"blogTags": {},
"blogTagsListPath": "/tags",
"blogTitle": "My blog (translated)",
}
`;

View File

@ -8,7 +8,10 @@
import {jest} from '@jest/globals';
import path from 'path';
import fs from 'fs-extra';
import {DEFAULT_PARSE_FRONT_MATTER} from '@docusaurus/utils';
import {
DEFAULT_PARSE_FRONT_MATTER,
DEFAULT_VCS_CONFIG,
} from '@docusaurus/utils';
import {fromPartial} from '@total-typescript/shoehorn';
import {normalizePluginOptions} from '@docusaurus/utils-validation';
import tree from 'tree-node-cli';
@ -51,7 +54,7 @@ function getBlogContentPaths(siteDir: string): BlogContentPaths {
}
async function testGenerateFeeds(
context: LoadContext,
contextInput: LoadContext,
optionsInput: Options,
): Promise<void> {
const options = validateOptions({
@ -62,6 +65,17 @@ async function testGenerateFeeds(
options: optionsInput,
});
const context: LoadContext = {
...contextInput,
siteConfig: {
...contextInput.siteConfig,
future: {
...contextInput.siteConfig?.future,
experimental_vcs: DEFAULT_VCS_CONFIG,
},
},
};
const contentPaths = getBlogContentPaths(context.siteDir);
const authorsMap = await getAuthorsMap({
contentPaths,

View File

@ -8,12 +8,7 @@
import {jest} from '@jest/globals';
import * as path from 'path';
import {normalizePluginOptions} from '@docusaurus/utils-validation';
import {
posixPath,
getFileCommitDate,
LAST_UPDATE_FALLBACK,
getLocaleConfig,
} from '@docusaurus/utils';
import {posixPath, getLocaleConfig, TEST_VCS} from '@docusaurus/utils';
import {DEFAULT_FUTURE_CONFIG} from '@docusaurus/core/src/server/configValidation';
import pluginContentBlog from '../index';
import {validateOptions} from '../options';
@ -32,6 +27,10 @@ import type {
EditUrlFunction,
} from '@docusaurus/plugin-content-blog';
async function getFileCreationDate(filePath: string): Promise<Date> {
return new Date((await TEST_VCS.getFileCreationInfo(filePath)).timestamp);
}
const markdown: MarkdownConfig = {
format: 'mdx',
mermaid: true,
@ -561,9 +560,7 @@ describe('blog plugin', () => {
const blogPosts = await getBlogPosts(siteDir);
const noDateSource = path.posix.join('@site', PluginPath, 'no date.md');
const noDateSourceFile = path.posix.join(siteDir, PluginPath, 'no date.md');
// We know the file exists and we know we have git
const result = await getFileCommitDate(noDateSourceFile, {age: 'oldest'});
const noDateSourceTime = result.date;
const noDateSourceTime = await getFileCreationDate(noDateSourceFile);
expect({
...getByTitle(blogPosts, 'no date').metadata,
@ -641,10 +638,7 @@ describe('blog plugin', () => {
},
DefaultI18N,
);
const {blogPosts, blogTags, blogListPaginated} =
(await plugin.loadContent!())!;
expect(blogListPaginated).toHaveLength(3);
const {blogPosts, blogTags} = (await plugin.loadContent!())!;
expect(Object.keys(blogTags)).toHaveLength(2);
expect(blogTags).toMatchSnapshot();
@ -674,29 +668,23 @@ describe('last update', () => {
);
const {blogPosts} = (await plugin.loadContent!())!;
const TestLastUpdate = await TEST_VCS.getFileLastUpdateInfo('any path');
expect(blogPosts[0]?.metadata.lastUpdatedBy).toBe('seb');
expect(blogPosts[0]?.metadata.lastUpdatedAt).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[1]?.metadata.lastUpdatedBy).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedBy,
);
expect(blogPosts[1]?.metadata.lastUpdatedBy).toBe(TestLastUpdate.author);
expect(blogPosts[1]?.metadata.lastUpdatedAt).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[2]?.metadata.lastUpdatedBy).toBe('seb');
expect(blogPosts[2]?.metadata.lastUpdatedAt).toBe(
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[2]?.metadata.lastUpdatedAt).toBe(TestLastUpdate.timestamp);
expect(blogPosts[3]?.metadata.lastUpdatedBy).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedBy,
);
expect(blogPosts[3]?.metadata.lastUpdatedAt).toBe(
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[3]?.metadata.lastUpdatedBy).toBe(TestLastUpdate.author);
expect(blogPosts[3]?.metadata.lastUpdatedAt).toBe(TestLastUpdate.timestamp);
});
it('time only', async () => {
@ -710,29 +698,27 @@ describe('last update', () => {
);
const {blogPosts} = (await plugin.loadContent!())!;
expect(blogPosts[0]?.metadata.title).toBe('Author');
const TestLastUpdate = await TEST_VCS.getFileLastUpdateInfo('any path');
expect(blogPosts[0]?.metadata.title).toBe('Both');
expect(blogPosts[0]?.metadata.lastUpdatedBy).toBeUndefined();
expect(blogPosts[0]?.metadata.lastUpdatedAt).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[1]?.metadata.title).toBe('Nothing');
expect(blogPosts[1]?.metadata.title).toBe('Last update date');
expect(blogPosts[1]?.metadata.lastUpdatedBy).toBeUndefined();
expect(blogPosts[1]?.metadata.lastUpdatedAt).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[2]?.metadata.title).toBe('Both');
expect(blogPosts[2]?.metadata.title).toBe('Author');
expect(blogPosts[2]?.metadata.lastUpdatedBy).toBeUndefined();
expect(blogPosts[2]?.metadata.lastUpdatedAt).toBe(
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[2]?.metadata.lastUpdatedAt).toBe(TestLastUpdate.timestamp);
expect(blogPosts[3]?.metadata.title).toBe('Last update date');
expect(blogPosts[3]?.metadata.title).toBe('Nothing');
expect(blogPosts[3]?.metadata.lastUpdatedBy).toBeUndefined();
expect(blogPosts[3]?.metadata.lastUpdatedAt).toBe(
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[3]?.metadata.lastUpdatedAt).toBe(TestLastUpdate.timestamp);
});
it('author only', async () => {
@ -746,20 +732,18 @@ describe('last update', () => {
);
const {blogPosts} = (await plugin.loadContent!())!;
const TestLastUpdate = await TEST_VCS.getFileLastUpdateInfo('any path');
expect(blogPosts[0]?.metadata.lastUpdatedBy).toBe('seb');
expect(blogPosts[0]?.metadata.lastUpdatedAt).toBeUndefined();
expect(blogPosts[1]?.metadata.lastUpdatedBy).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedBy,
);
expect(blogPosts[1]?.metadata.lastUpdatedBy).toBe(TestLastUpdate.author);
expect(blogPosts[1]?.metadata.lastUpdatedAt).toBeUndefined();
expect(blogPosts[2]?.metadata.lastUpdatedBy).toBe('seb');
expect(blogPosts[2]?.metadata.lastUpdatedAt).toBeUndefined();
expect(blogPosts[3]?.metadata.lastUpdatedBy).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedBy,
);
expect(blogPosts[3]?.metadata.lastUpdatedBy).toBe(TestLastUpdate.author);
expect(blogPosts[3]?.metadata.lastUpdatedAt).toBeUndefined();
});

View File

@ -0,0 +1,324 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import * as _ from 'lodash';
import {fromPartial} from '@total-typescript/shoehorn';
import {buildAllRoutes} from '../routes';
import {DEFAULT_OPTIONS} from '../options';
import type {PartialDeep} from '@total-typescript/shoehorn';
import type {BlogPost, BlogPostMetadata} from '@docusaurus/plugin-content-blog';
type Params = Parameters<typeof buildAllRoutes>[0];
async function testBuildAllRoutes(overrides: PartialDeep<Params> = {}) {
const createData = jest.fn(
async (name: string, _data: unknown) => `/data/${name}`,
);
const params: Params = fromPartial<Params>({
baseUrl: '/',
aliasedSource: (str: string) => `@aliased${str}`,
...overrides,
content: {
blogTitle: 'Blog Title',
blogDescription: 'Blog Description',
blogSidebarTitle: 'Blog Sidebar Title',
authorsMap: {},
blogTagsListPath: '',
blogTags: {},
blogPosts: [],
...overrides?.content,
},
options: {
...DEFAULT_OPTIONS,
...overrides?.options,
},
actions: {
createData,
...overrides?.actions,
},
});
const routes = await buildAllRoutes(params);
const data = Object.fromEntries(
createData.mock.calls.map((call) => [call[0], call[1]]),
);
function getRouteByPath(path: string) {
const route = routes.find((r) => r.path === path);
if (!route) {
throw new Error(`Route not found for path: ${path}`);
}
return route;
}
function getRoutesByComponent(component: string) {
return routes.filter((r) => r.component === component);
}
return {routes, data, utils: {getRouteByPath, getRoutesByComponent}};
}
function blogPost(overrides: PartialDeep<BlogPost> = {}): BlogPost {
const id = overrides.id ?? 'blog-post';
return fromPartial<BlogPost>({
id,
content: `Content for ${id}`,
...overrides,
metadata: fromPartial<BlogPostMetadata>({
title: `Title for ${id}`,
description: `Description for ${id}`,
permalink: `/blog/${id}`,
source: `@site/blog/${id}.md`,
date: new Date('2020-01-01'),
tags: [],
readingTime: 2,
authors: [],
frontMatter: {
...overrides?.metadata?.frontMatter,
},
...overrides?.metadata,
}),
});
}
describe('buildAllRoutes', () => {
it('works for empty blog', async () => {
const {routes, data} = await testBuildAllRoutes({
content: {
blogPosts: [],
},
});
expect(routes).toMatchInlineSnapshot(`
[
{
"component": "@theme/BlogListPage",
"exact": true,
"modules": {
"items": [],
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog",
"props": {
"metadata": {
"blogDescription": "Blog Description",
"blogTitle": "Blog Title",
"nextPage": undefined,
"page": 1,
"permalink": "/blog",
"postsPerPage": 10,
"previousPage": undefined,
"totalCount": 0,
"totalPages": 1,
},
},
},
]
`);
expect(data).toMatchInlineSnapshot(`
{
"blog-post-list-prop-default.json": {
"items": [],
"title": "Blog Sidebar Title",
},
"blogMetadata-default.json": {
"authorsListPath": "/blog/authors",
"blogBasePath": "/blog",
"blogTitle": "Blog Title",
},
}
`);
});
it('works for single blog post', async () => {
const {routes, data} = await testBuildAllRoutes({
content: {
blogPosts: [blogPost()],
},
});
expect(routes).toMatchInlineSnapshot(`
[
{
"component": "@theme/BlogPostPage",
"context": {
"blogMetadata": "@aliased/data/blogMetadata-default.json",
},
"exact": true,
"metadata": {
"lastUpdatedAt": undefined,
"sourceFilePath": "blog/blog-post.md",
},
"modules": {
"content": "@site/blog/blog-post.md",
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/blog-post",
},
{
"component": "@theme/BlogListPage",
"exact": true,
"modules": {
"items": [
{
"content": {
"__import": true,
"path": "@site/blog/blog-post.md",
"query": {
"truncated": true,
},
},
},
],
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog",
"props": {
"metadata": {
"blogDescription": "Blog Description",
"blogTitle": "Blog Title",
"nextPage": undefined,
"page": 1,
"permalink": "/blog",
"postsPerPage": 10,
"previousPage": undefined,
"totalCount": 1,
"totalPages": 1,
},
},
},
{
"component": "@theme/BlogArchivePage",
"exact": true,
"path": "/blog/archive",
"props": {
"archive": {
"blogPosts": [
{
"content": "Content for blog-post",
"id": "blog-post",
"metadata": {
"authors": [],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for blog-post",
"frontMatter": {},
"permalink": "/blog/blog-post",
"readingTime": 2,
"source": "@site/blog/blog-post.md",
"tags": [],
"title": "Title for blog-post",
},
},
],
},
},
},
]
`);
expect(data).toMatchInlineSnapshot(`
{
"blog-post-list-prop-default.json": {
"items": [
{
"date": 2020-01-01T00:00:00.000Z,
"permalink": "/blog/blog-post",
"title": "Title for blog-post",
"unlisted": undefined,
},
],
"title": "Blog Sidebar Title",
},
"blogMetadata-default.json": {
"authorsListPath": "/blog/authors",
"blogBasePath": "/blog",
"blogTitle": "Blog Title",
},
"site-blog-blog-post-md-0d7.json": {
"authors": [],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for blog-post",
"frontMatter": {},
"permalink": "/blog/blog-post",
"readingTime": 2,
"source": "@site/blog/blog-post.md",
"tags": [],
"title": "Title for blog-post",
},
}
`);
});
it('works for realistic blog post', async () => {
const {routes, data} = await testBuildAllRoutes({
options: {
postsPerPage: 2,
},
content: {
blogTitle: 'Custom blog title',
blogDescription: 'Custom blog description',
blogSidebarTitle: 'Custom blog sidebar title',
blogPosts: [
blogPost({id: 'post1', metadata: {authors: [{key: 'author1'}]}}),
blogPost({id: 'post2', metadata: {authors: [{key: 'author1'}]}}),
blogPost({
id: 'post3',
metadata: {
authors: [{key: 'author3'}],
unlisted: true,
},
}),
blogPost({
id: 'post4',
metadata: {
authors: [{key: 'author1'}, {key: 'author2'}],
},
}),
blogPost({
id: 'post5',
metadata: {authors: [{key: 'author2'}, {key: 'author3'}]},
}),
blogPost({id: 'post6'}),
],
authorsMap: {
author1: {
key: 'author1',
name: 'Author 1',
page: {permalink: '/blog/authors/author1'},
},
author2: {
key: 'author2',
name: 'Author 2',
page: null,
},
author3: {
key: 'author3',
name: 'Author 3',
page: {permalink: '/blog/authors/author3'},
},
},
},
});
expect(_.countBy(routes, 'component')).toMatchInlineSnapshot(`
{
"@theme/Blog/Pages/BlogAuthorsListPage": 1,
"@theme/Blog/Pages/BlogAuthorsPostsPage": 3,
"@theme/BlogArchivePage": 1,
"@theme/BlogListPage": 3,
"@theme/BlogPostPage": 6,
}
`);
expect(routes).toMatchSnapshot();
expect(data).toMatchSnapshot();
});
});

View File

@ -6,6 +6,7 @@
*/
import {updateTranslationFileMessages} from '@docusaurus/utils';
import {fromPartial} from '@total-typescript/shoehorn';
import {getTranslationFiles, translateContent} from '../translations';
import {DEFAULT_OPTIONS} from '../options';
import type {
@ -16,13 +17,13 @@ import type {
const sampleBlogOptions: PluginOptions = {
...DEFAULT_OPTIONS,
blogSidebarTitle: 'All my posts',
blogTitle: 'My blog',
blogDescription: "Someone's random blog",
blogSidebarTitle: 'All my posts',
};
const sampleBlogPosts: BlogPost[] = [
{
fromPartial({
id: 'hello',
metadata: {
permalink: '/blog/2021/06/19/hello',
@ -37,27 +38,13 @@ const sampleBlogPosts: BlogPost[] = [
unlisted: false,
},
content: '',
},
}),
];
const sampleBlogContent: BlogContent = {
blogTitle: sampleBlogOptions.blogTitle,
blogDescription: sampleBlogOptions.blogDescription,
blogSidebarTitle: sampleBlogOptions.blogSidebarTitle,
blogListPaginated: [
{
items: ['hello'],
metadata: {
permalink: '/',
page: 1,
postsPerPage: 10,
totalPages: 1,
totalCount: 1,
previousPage: undefined,
nextPage: undefined,
blogTitle: sampleBlogOptions.blogTitle,
blogDescription: sampleBlogOptions.blogDescription,
},
},
],
blogPosts: sampleBlogPosts,
blogTags: {},
blogTagsListPath: '/tags',

View File

@ -19,7 +19,6 @@ import {
Globby,
groupTaggedItems,
getTagVisibility,
getFileCommitDate,
getContentPathList,
isUnlisted,
isDraft,
@ -225,6 +224,7 @@ async function processBlogSourceFile(
siteConfig: {
baseUrl,
markdown: {parseFrontMatter},
future: {experimental_vcs: vcs},
},
siteDir,
i18n,
@ -257,6 +257,7 @@ async function processBlogSourceFile(
blogSourceAbsolute,
options,
frontMatter.last_update,
vcs,
);
const draft = isDraft({frontMatter});
@ -285,17 +286,11 @@ async function processBlogSourceFile(
return parsedBlogFileName.date;
}
try {
const result = await getFileCommitDate(blogSourceAbsolute, {
age: 'oldest',
includeAuthor: false,
});
return result.date;
} catch (err) {
logger.warn(err);
const result = await vcs.getFileCreationInfo(blogSourceAbsolute);
if (result == null) {
return (await fs.stat(blogSourceAbsolute)).birthtime;
}
return new Date(result.timestamp);
}
const date = await getDate();
@ -406,6 +401,8 @@ export async function generateBlogPosts(
ignore: exclude,
});
// TODO this should be done outside of this function
// directly in plugin loadContent()
const tagsFile = await getTagsFile({contentPaths, tags: options.tags});
async function doProcessBlogSourceFile(blogSourceFile: string) {

View File

@ -17,7 +17,6 @@ import {
createAbsoluteFilePathMatcher,
getContentPathList,
getDataFilePath,
DEFAULT_PLUGIN_ID,
resolveMarkdownLinkPathname,
getLocaleConfig,
} from '@docusaurus/utils';
@ -25,7 +24,6 @@ import {getTagsFilePathsToWatch} from '@docusaurus/utils-validation';
import {createMDXLoaderItem} from '@docusaurus/mdx-loader';
import {
getBlogTags,
paginateBlogPosts,
shouldBeListed,
applyProcessBlogPosts,
generateBlogPosts,
@ -45,7 +43,6 @@ import type {
Assets,
BlogTags,
BlogContent,
BlogPaginated,
} from '@docusaurus/plugin-content-blog';
import type {RuleSetRule, RuleSetUseItem} from 'webpack';
@ -85,7 +82,7 @@ export default async function pluginContentBlog(
})
: undefined,
};
const pluginId = options.id ?? DEFAULT_PLUGIN_ID;
const pluginId = options.id;
const pluginDataDirRoot = path.join(generatedFilesDir, PluginName);
const dataDir = path.join(pluginDataDirRoot, pluginId);
@ -260,9 +257,10 @@ export default async function pluginContentBlog(
if (!blogPosts.length) {
return {
blogTitle,
blogDescription,
blogSidebarTitle,
blogPosts: [],
blogListPaginated: [],
blogTags: {},
blogTagsListPath,
authorsMap,
@ -291,15 +289,9 @@ export default async function pluginContentBlog(
}
});
const blogListPaginated: BlogPaginated[] = paginateBlogPosts({
blogPosts: listedBlogPosts,
blogTitle,
blogDescription,
postsPerPageOption,
basePageUrl: baseBlogUrl,
pageBasePath,
});
// TODO this is not the correct place to aggregate and paginate tags
// for reasons similar to https://github.com/facebook/docusaurus/pull/11562
// What we should do here is only read the tags file (similar to authors)
const blogTags: BlogTags = getBlogTags({
blogPosts,
postsPerPageOption,
@ -309,9 +301,10 @@ export default async function pluginContentBlog(
});
return {
blogTitle,
blogDescription,
blogSidebarTitle,
blogPosts,
blogListPaginated,
blogTags,
blogTagsListPath,
authorsMap,

View File

@ -15,7 +15,7 @@ import {
RouteBasePathSchema,
URISchema,
} from '@docusaurus/utils-validation';
import {GlobExcludeDefault} from '@docusaurus/utils';
import {DEFAULT_PLUGIN_ID, GlobExcludeDefault} from '@docusaurus/utils';
import type {
PluginOptions,
Options,
@ -25,6 +25,7 @@ import type {
import type {OptionValidationContext} from '@docusaurus/types';
export const DEFAULT_OPTIONS: PluginOptions = {
id: DEFAULT_PLUGIN_ID,
feedOptions: {
type: ['rss', 'atom'],
copyright: '',

View File

@ -431,7 +431,7 @@ declare module '@docusaurus/plugin-content-blog' {
export type PluginOptions = MDXOptions &
TagsPluginOptions & {
/** Plugin ID. */
id?: string;
id: string;
/**
* Path to the blog content directory on the file system, relative to site
* directory.
@ -583,9 +583,10 @@ declare module '@docusaurus/plugin-content-blog' {
export type AuthorsMap = {[authorKey: string]: AuthorWithKey};
export type BlogContent = {
blogSidebarTitle: string;
blogTitle: string; // for translation purposes
blogDescription: string; // for translation purposes
blogSidebarTitle: string; // for translation purposes
blogPosts: BlogPost[];
blogListPaginated: BlogPaginated[];
blogTags: BlogTags;
blogTagsListPath: string;
authorsMap?: AuthorsMap;

View File

@ -67,27 +67,24 @@ export async function buildAllRoutes({
blogArchiveComponent,
routeBasePath,
archiveBasePath,
blogTitle,
authorsBasePath,
postsPerPage,
blogDescription,
pageBasePath,
} = options;
const pluginId = options.id!;
const pluginId = options.id;
const {createData} = actions;
const {
blogTitle,
blogDescription,
blogSidebarTitle,
blogPosts,
blogListPaginated,
blogTags,
blogTagsListPath,
authorsMap,
} = content;
const authorsListPath = normalizeUrl([
baseUrl,
routeBasePath,
authorsBasePath,
]);
const blogBasePath = normalizeUrl([baseUrl, routeBasePath]);
const authorsListPath = normalizeUrl([blogBasePath, authorsBasePath]);
const listedBlogPosts = blogPosts.filter(shouldBeListed);
@ -119,7 +116,7 @@ export async function buildAllRoutes({
async function createBlogMetadataModule() {
const blogMetadata: BlogMetadata = {
blogBasePath: normalizeUrl([baseUrl, routeBasePath]),
blogBasePath,
blogTitle,
authorsListPath,
};
@ -156,7 +153,7 @@ export async function buildAllRoutes({
if (archiveBasePath && listedBlogPosts.length) {
return [
{
path: normalizeUrl([baseUrl, routeBasePath, archiveBasePath]),
path: normalizeUrl([blogBasePath, archiveBasePath]),
component: blogArchiveComponent,
exact: true,
props: {
@ -210,6 +207,15 @@ export async function buildAllRoutes({
}
function createBlogPostsPaginatedRoutes(): RouteConfig[] {
const blogListPaginated = paginateBlogPosts({
blogPosts: listedBlogPosts,
blogTitle,
blogDescription,
postsPerPageOption: postsPerPage,
basePageUrl: blogBasePath,
pageBasePath,
});
return blogListPaginated.map((paginated) => {
return {
path: paginated.metadata.permalink,
@ -294,12 +300,14 @@ export async function buildAllRoutes({
sidebar: sidebarModulePath,
},
props: {
authors: authors.map((author) =>
toAuthorItemProp({
authors: authors.map((author) => {
const authorPosts = blogPostsByAuthorKey[author.key] ?? [];
const listedAuthorPosts = authorPosts.filter(shouldBeListed);
return toAuthorItemProp({
author,
count: blogPostsByAuthorKey[author.key]?.length ?? 0,
}),
),
count: listedAuthorPosts.length,
});
}),
},
context: {
blogMetadata: blogMetadataModulePath,
@ -309,16 +317,17 @@ export async function buildAllRoutes({
function createAuthorPaginatedRoute(author: AuthorWithKey): RouteConfig[] {
const authorBlogPosts = blogPostsByAuthorKey[author.key] ?? [];
const listedAuthorBlogPosts = authorBlogPosts.filter(shouldBeListed);
if (!author.page) {
return [];
}
const pages = paginateBlogPosts({
blogPosts: authorBlogPosts,
blogPosts: listedAuthorBlogPosts,
basePageUrl: author.page.permalink,
blogDescription,
blogTitle,
pageBasePath: authorsBasePath,
pageBasePath,
postsPerPageOption: postsPerPage,
});
@ -332,7 +341,10 @@ export async function buildAllRoutes({
sidebar: sidebarModulePath,
},
props: {
author: toAuthorItemProp({author, count: authorBlogPosts.length}),
author: toAuthorItemProp({
author,
count: listedAuthorBlogPosts.length,
}),
listMetadata: metadata,
},
context: {

View File

@ -5,30 +5,8 @@
* LICENSE file in the root directory of this source tree.
*/
import type {TranslationFileContent, TranslationFile} from '@docusaurus/types';
import type {
PluginOptions,
BlogContent,
BlogPaginated,
} from '@docusaurus/plugin-content-blog';
function translateListPage(
blogListPaginated: BlogPaginated[],
translations: TranslationFileContent,
) {
return blogListPaginated.map((page) => {
const {items, metadata} = page;
return {
items,
metadata: {
...metadata,
blogTitle: translations.title?.message ?? page.metadata.blogTitle,
blogDescription:
translations.description?.message ?? page.metadata.blogDescription,
},
};
});
}
import type {TranslationFile} from '@docusaurus/types';
import type {PluginOptions, BlogContent} from '@docusaurus/plugin-content-blog';
export function getTranslationFiles(options: PluginOptions): TranslationFile[] {
return [
@ -56,14 +34,13 @@ export function translateContent(
content: BlogContent,
translationFiles: TranslationFile[],
): BlogContent {
const {content: optionsTranslations} = translationFiles[0]!;
const {content: translations} = translationFiles[0]!;
return {
...content,
blogTitle: translations.title?.message ?? content.blogTitle,
blogDescription:
translations.description?.message ?? content.blogDescription,
blogSidebarTitle:
optionsTranslations['sidebar.title']?.message ?? content.blogSidebarTitle,
blogListPaginated: translateListPage(
content.blogListPaginated,
optionsTranslations,
),
translations['sidebar.title']?.message ?? content.blogSidebarTitle,
};
}

View File

@ -12,8 +12,8 @@ import {
createSlugger,
posixPath,
DEFAULT_PLUGIN_ID,
LAST_UPDATE_FALLBACK,
getLocaleConfig,
TEST_VCS,
} from '@docusaurus/utils';
import {getTagsFile} from '@docusaurus/utils-validation';
import {createSidebarsUtils} from '../sidebars/utils';
@ -529,8 +529,8 @@ describe('simple site', () => {
custom_edit_url: 'https://github.com/customUrl/docs/lorem.md',
unrelated_front_matter: "won't be part of metadata",
},
lastUpdatedAt: LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdatedBy: LAST_UPDATE_FALLBACK.lastUpdatedBy,
lastUpdatedAt: TEST_VCS.LAST_UPDATE_INFO.timestamp,
lastUpdatedBy: TEST_VCS.LAST_UPDATE_INFO.author,
tags: [],
unlisted: false,
});
@ -664,7 +664,7 @@ describe('simple site', () => {
},
title: 'Last Update Author Only',
},
lastUpdatedAt: LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdatedAt: TEST_VCS.LAST_UPDATE_INFO.timestamp,
lastUpdatedBy: 'Custom Author (processed by parseFrontMatter)',
sidebarPosition: undefined,
tags: [],

View File

@ -568,13 +568,28 @@ describe('useSidebarBreadcrumbs', () => {
it('returns first level link', () => {
const pathname = '/somePathName';
const sidebar = [testCategory(), testLink({href: pathname})];
const sidebar = [testCategory(), testLink({href: pathname, docId: 'doc1'})];
expect(createUseSidebarBreadcrumbsMock(sidebar)(pathname)).toEqual([
sidebar[1],
]);
});
it('returns doc links only', () => {
const pathname = '/somePathName';
// A link that is not a doc link should not appear in the breadcrumbs
// See https://github.com/facebook/docusaurus/pull/11616
const nonDocLink = testLink({href: pathname});
const docLink = testLink({href: pathname, docId: 'doc1'});
const sidebar = [testCategory(), nonDocLink, docLink];
expect(createUseSidebarBreadcrumbsMock(sidebar)(pathname)).toEqual([
docLink,
]);
});
it('returns nested category', () => {
const pathname = '/somePathName';
@ -613,7 +628,7 @@ describe('useSidebarBreadcrumbs', () => {
it('returns nested link', () => {
const pathname = '/somePathName';
const link = testLink({href: pathname});
const link = testLink({href: pathname, docId: 'docNested'});
const categoryLevel3 = testCategory({
items: [testLink(), link, testLink()],
@ -657,6 +672,35 @@ describe('useSidebarBreadcrumbs', () => {
createUseSidebarBreadcrumbsMock(undefined, false)('/foo'),
).toBeNull();
});
// Regression test for https://github.com/facebook/docusaurus/issues/11612
it('returns the category that owns the URL, not a category with a link pointing to it', () => {
const categoryA: PropSidebarItemCategory = testCategory({
label: 'Category A',
href: '/category-a',
items: [
testLink({href: '/category-a/doc1', label: 'Doc 1'}),
testLink({href: '/category-a/doc2', label: 'Doc 2'}),
// This link points to Category B's generated-index
testLink({href: '/category-b', label: 'Go to Category B'}),
],
});
const categoryB: PropSidebarItemCategory = testCategory({
label: 'Category B',
href: '/category-b',
items: [
testLink({href: '/category-b/item1', label: 'Item 1'}),
testLink({href: '/category-b/item2', label: 'Item 2'}),
],
});
const sidebar: PropSidebar = [categoryA, categoryB];
expect(createUseSidebarBreadcrumbsMock(sidebar)('/category-b')).toEqual([
categoryB,
]);
});
});
describe('useCurrentSidebarCategory', () => {
@ -708,12 +752,16 @@ describe('useCurrentSidebarCategory', () => {
expect(mockUseCurrentSidebarCategory('/cat2')).toEqual(category2);
});
it('works for category link item', () => {
const link = testLink({href: '/my/link/path'});
it('works for category doc link item', () => {
const pathname = '/my/link/path';
const nonDocLink = testLink({href: pathname});
const docLink = testLink({href: pathname, docId: 'doc1'});
const category: PropSidebarItemCategory = testCategory({
href: '/cat1',
items: [testLink(), testLink(), link, testCategory()],
items: [testLink(), testLink(), nonDocLink, docLink, testCategory()],
});
const sidebar: PropSidebar = [
testLink(),
testLink(),
@ -724,18 +772,28 @@ describe('useCurrentSidebarCategory', () => {
const mockUseCurrentSidebarCategory =
createUseCurrentSidebarCategoryMock(sidebar);
expect(mockUseCurrentSidebarCategory('/my/link/path')).toEqual(category);
expect(mockUseCurrentSidebarCategory(pathname)).toEqual(category);
});
it('works for nested category link item', () => {
const link = testLink({href: '/my/link/path'});
const pathname = '/my/link/path';
const nonDocLink = testLink({href: pathname});
const docLink = testLink({href: pathname, docId: 'doc1'});
const category2: PropSidebarItemCategory = testCategory({
href: '/cat2',
items: [testLink(), testLink(), link, testCategory()],
items: [
testLink(),
testLink(),
testCategory({items: [nonDocLink]}),
nonDocLink,
docLink,
testCategory(),
],
});
const category1: PropSidebarItemCategory = testCategory({
href: '/cat1',
items: [testLink(), testLink(), category2, testCategory()],
items: [testLink(), nonDocLink, testLink(), category2, testCategory()],
});
const sidebar: PropSidebar = [
testLink(),
@ -780,6 +838,38 @@ describe('useCurrentSidebarCategory', () => {
`"Unexpected: cant find current sidebar in context"`,
);
});
// Regression test for https://github.com/facebook/docusaurus/issues/11612
it('returns the category that owns the URL, not a category with a link pointing to it', () => {
const categoryA: PropSidebarItemCategory = testCategory({
label: 'Category A',
href: '/category-a',
items: [
testLink({href: '/category-a/doc1', label: 'Doc 1'}),
testLink({href: '/category-a/doc2', label: 'Doc 2'}),
// This link points to Category B's generated-index
testLink({href: '/category-b', label: 'Go to Category B'}),
],
});
const categoryB: PropSidebarItemCategory = testCategory({
label: 'Category B',
href: '/category-b',
items: [
testLink({href: '/category-b/item1', label: 'Item 1'}),
testLink({href: '/category-b/item2', label: 'Item 2'}),
],
});
const sidebar: PropSidebar = [categoryA, categoryB];
const mockUseCurrentSidebarCategory =
createUseCurrentSidebarCategoryMock(sidebar);
// When visiting /category-b, we should get Category B (the owner),
// not Category A (which just has a link to it)
expect(mockUseCurrentSidebarCategory('/category-b')).toEqual(categoryB);
});
});
describe('useCurrentSidebarSiblings', () => {
@ -805,10 +895,10 @@ describe('useCurrentSidebarSiblings', () => {
testCategory(),
];
const mockUseCurrentSidebarCategory =
const mockUseCurrentSidebarSiblings =
createUseCurrentSidebarSiblingsMock(sidebar);
expect(mockUseCurrentSidebarCategory('/cat')).toEqual(category.items);
expect(mockUseCurrentSidebarSiblings('/cat')).toEqual(category.items);
});
it('works for sidebar root', () => {
@ -823,10 +913,10 @@ describe('useCurrentSidebarSiblings', () => {
testCategory(),
];
const mockUseCurrentSidebarCategory =
const mockUseCurrentSidebarSiblings =
createUseCurrentSidebarSiblingsMock(sidebar);
expect(mockUseCurrentSidebarCategory('/rootLink')).toEqual(sidebar);
expect(mockUseCurrentSidebarSiblings('/rootLink')).toEqual(sidebar);
});
it('works for nested sidebar category', () => {
@ -852,10 +942,13 @@ describe('useCurrentSidebarSiblings', () => {
});
it('works for category link item', () => {
const link = testLink({href: '/my/link/path'});
const pathname = '/my/link/path';
const nonDocLink = testLink({href: pathname});
const docLink = testLink({href: pathname, docId: 'doc1'});
const category: PropSidebarItemCategory = testCategory({
href: '/cat1',
items: [testLink(), testLink(), link, testCategory()],
items: [testLink(), testLink(), nonDocLink, docLink, testCategory()],
});
const sidebar: PropSidebar = [
testLink(),
@ -864,23 +957,24 @@ describe('useCurrentSidebarSiblings', () => {
testCategory(),
];
const mockUseCurrentSidebarCategory =
const mockUseCurrentSidebarSiblings =
createUseCurrentSidebarSiblingsMock(sidebar);
expect(mockUseCurrentSidebarCategory('/my/link/path')).toEqual(
category.items,
);
expect(mockUseCurrentSidebarSiblings(pathname)).toEqual(category.items);
});
it('works for nested category link item', () => {
const link = testLink({href: '/my/link/path'});
const pathname = '/my/link/path';
const nonDocLink = testLink({href: pathname});
const docLink = testLink({href: pathname, docId: 'doc1'});
const category2: PropSidebarItemCategory = testCategory({
href: '/cat2',
items: [testLink(), testLink(), link, testCategory()],
items: [testLink(), testLink(), nonDocLink, testCategory()],
});
const category1: PropSidebarItemCategory = testCategory({
href: '/cat1',
items: [testLink(), testLink(), category2, testCategory()],
items: [testLink(), testLink(), category2, docLink, testCategory()],
});
const sidebar: PropSidebar = [
testLink(),
@ -889,18 +983,16 @@ describe('useCurrentSidebarSiblings', () => {
testCategory(),
];
const mockUseCurrentSidebarCategory =
const mockUseCurrentSidebarSiblings =
createUseCurrentSidebarSiblingsMock(sidebar);
expect(mockUseCurrentSidebarCategory('/my/link/path')).toEqual(
category2.items,
);
expect(mockUseCurrentSidebarSiblings(pathname)).toEqual(category1.items);
});
it('throws when sidebar is missing', () => {
const mockUseCurrentSidebarCategory = createUseCurrentSidebarSiblingsMock();
const mockUseCurrentSidebarSiblings = createUseCurrentSidebarSiblingsMock();
expect(() =>
mockUseCurrentSidebarCategory('/cat'),
mockUseCurrentSidebarSiblings('/cat'),
).toThrowErrorMatchingInlineSnapshot(
`"Unexpected: cant find current sidebar in context"`,
);

View File

@ -234,15 +234,22 @@ function getSidebarBreadcrumbs({
}): PropSidebarBreadcrumbsItem[] {
const breadcrumbs: PropSidebarBreadcrumbsItem[] = [];
function extract(items: PropSidebarItem[]) {
function extract(items: PropSidebarItem[]): boolean {
for (const item of items) {
if (
(item.type === 'category' &&
(isSamePath(item.href, pathname) || extract(item.items))) ||
(item.type === 'link' && isSamePath(item.href, pathname))
// Extract category item
if (item.type === 'category') {
if (isSamePath(item.href, pathname) || extract(item.items)) {
breadcrumbs.unshift(item);
return true;
}
}
// Extract doc item
else if (
item.type === 'link' &&
item.docId &&
isSamePath(item.href, pathname)
) {
const filtered = onlyCategories && item.type !== 'category';
if (!filtered) {
if (!onlyCategories) {
breadcrumbs.unshift(item);
}
return true;

View File

@ -97,6 +97,7 @@ async function doProcessDocMetadata({
siteDir,
siteConfig: {
markdown: {parseFrontMatter},
future: {experimental_vcs: vcs},
},
} = context;
@ -125,6 +126,7 @@ async function doProcessDocMetadata({
filePath,
options,
lastUpdateFrontMatter,
vcs,
);
// E.g. api/plugins/myDoc -> myDoc; myDoc -> myDoc

View File

@ -8,6 +8,7 @@
import * as path from 'path';
import {fromPartial} from '@total-typescript/shoehorn';
import {DEFAULT_PARSE_FRONT_MATTER} from '@docusaurus/utils/src';
import {DEFAULT_VCS_CONFIG} from '@docusaurus/utils';
import {readVersionsMetadata} from '../version';
import {DEFAULT_OPTIONS} from '../../options';
import {loadVersion} from '../loadVersion';
@ -37,6 +38,9 @@ async function siteFixture(fixture: string) {
markdown: {
parseFrontMatter: DEFAULT_PARSE_FRONT_MATTER,
},
future: {
experimental_vcs: DEFAULT_VCS_CONFIG,
},
},
});

View File

@ -98,6 +98,7 @@ async function processPageSourceFile(
): Promise<Metadata | undefined> {
const {context, options, contentPaths} = params;
const {siteConfig, baseUrl, siteDir, i18n} = context;
const vcs = siteConfig.future.experimental_vcs;
const {editUrl} = options;
// Lookup in localized folder in priority
@ -180,6 +181,7 @@ async function processPageSourceFile(
source,
options,
frontMatter.last_update,
vcs,
);
if (isDraft({frontMatter})) {

View File

@ -13,7 +13,6 @@ import {
addTrailingPathSeparator,
createAbsoluteFilePathMatcher,
getContentPathList,
DEFAULT_PLUGIN_ID,
} from '@docusaurus/utils';
import {createMDXLoaderRule} from '@docusaurus/mdx-loader';
import {createAllRoutes} from './routes';
@ -38,7 +37,7 @@ export default async function pluginContentPages(
generatedFilesDir,
'docusaurus-plugin-content-pages',
);
const dataDir = path.join(pluginDataDirRoot, options.id ?? DEFAULT_PLUGIN_ID);
const dataDir = path.join(pluginDataDirRoot, options.id);
async function createPagesMDXLoaderRule(): Promise<RuleSetRule> {
const {

View File

@ -14,11 +14,12 @@ import {
RouteBasePathSchema,
URISchema,
} from '@docusaurus/utils-validation';
import {GlobExcludeDefault} from '@docusaurus/utils';
import {DEFAULT_PLUGIN_ID, GlobExcludeDefault} from '@docusaurus/utils';
import type {OptionValidationContext} from '@docusaurus/types';
import type {PluginOptions, Options} from '@docusaurus/plugin-content-pages';
export const DEFAULT_OPTIONS: PluginOptions = {
id: DEFAULT_PLUGIN_ID,
path: 'src/pages', // Path to data on filesystem, relative to site dir.
routeBasePath: '/', // URL Route.
include: ['**/*.{js,jsx,ts,tsx,md,mdx}'], // Extensions to include.

View File

@ -19,7 +19,7 @@ declare module '@docusaurus/plugin-content-pages' {
};
export type PluginOptions = MDXOptions & {
id?: string;
id: string;
path: string;
routeBasePath: string;
include: string[];

View File

@ -6,12 +6,14 @@
*/
import {fromPartial} from '@total-typescript/shoehorn';
import {DEFAULT_VCS_CONFIG} from '@docusaurus/utils';
import createSitemap from '../createSitemap';
import type {PluginOptions} from '../options';
import type {DocusaurusConfig, RouteConfig} from '@docusaurus/types';
const siteConfig: DocusaurusConfig = fromPartial({
url: 'https://example.com',
future: {experimental_vcs: DEFAULT_VCS_CONFIG},
});
const options: PluginOptions = {

View File

@ -6,6 +6,7 @@
*/
import {fromPartial} from '@total-typescript/shoehorn';
import {TEST_VCS} from '@docusaurus/utils';
import {createSitemapItem} from '../createSitemapItem';
import {DEFAULT_OPTIONS} from '../options';
import type {PluginOptions} from '../options';
@ -13,6 +14,7 @@ import type {DocusaurusConfig, RouteConfig} from '@docusaurus/types';
const siteConfig: DocusaurusConfig = fromPartial({
url: 'https://example.com',
future: {experimental_vcs: TEST_VCS},
});
function test(params: {

View File

@ -6,16 +6,17 @@
*/
import {applyTrailingSlash} from '@docusaurus/utils-common';
import {getLastUpdate, normalizeUrl} from '@docusaurus/utils';
import {normalizeUrl} from '@docusaurus/utils';
import type {LastModOption, SitemapItem} from './types';
import type {DocusaurusConfig, RouteConfig} from '@docusaurus/types';
import type {DocusaurusConfig, RouteConfig, VcsConfig} from '@docusaurus/types';
import type {PluginOptions} from './options';
async function getRouteLastUpdatedAt(
route: RouteConfig,
vcs: Pick<VcsConfig, 'getFileLastUpdateInfo'>,
): Promise<number | null | undefined> {
// Important to bail-out early here
// This can lead to duplicated getLastUpdate() calls and performance problems
// This can lead to duplicated VCS calls and performance problems
// See https://github.com/facebook/docusaurus/pull/11211
if (route.metadata?.lastUpdatedAt === null) {
return null;
@ -24,8 +25,10 @@ async function getRouteLastUpdatedAt(
return route.metadata?.lastUpdatedAt;
}
if (route.metadata?.sourceFilePath) {
const lastUpdate = await getLastUpdate(route.metadata?.sourceFilePath);
return lastUpdate?.lastUpdatedAt ?? null;
const lastUpdateInfo = await vcs.getFileLastUpdateInfo(
route.metadata?.sourceFilePath,
);
return lastUpdateInfo?.timestamp ?? null;
}
return undefined;
@ -46,14 +49,16 @@ function formatLastmod(timestamp: number, lastmodOption: LastModOption) {
async function getRouteLastmod({
route,
lastmod,
vcs,
}: {
route: RouteConfig;
lastmod: LastModOption | null;
vcs: Pick<VcsConfig, 'getFileLastUpdateInfo'>;
}): Promise<string | null> {
if (lastmod === null) {
return null;
}
const lastUpdatedAt = (await getRouteLastUpdatedAt(route)) ?? null;
const lastUpdatedAt = (await getRouteLastUpdatedAt(route, vcs)) ?? null;
return lastUpdatedAt ? formatLastmod(lastUpdatedAt, lastmod) : null;
}
@ -77,6 +82,10 @@ export async function createSitemapItem({
]),
changefreq,
priority,
lastmod: await getRouteLastmod({route, lastmod}),
lastmod: await getRouteLastmod({
route,
lastmod,
vcs: siteConfig.future.experimental_vcs,
}),
};
}

View File

@ -14,16 +14,23 @@ import styles from './styles.module.css';
type Token = Props['line'][number];
// Replaces '\n' by ''
// Historical code, not sure why we even need this :/
// This <br/ seems useful when the line has no content to prevent collapsing.
// For code blocks with "diff" languages, this makes the empty lines collapse to
// zero height lines, which is undesirable.
// See also https://github.com/facebook/docusaurus/pull/11565
function LineBreak() {
return <br />;
}
// Replaces single lines with '\n' by '' so that we don't end up with
// duplicate line breaks (the '\n' + the artificial <br/> above)
// see also https://github.com/facebook/docusaurus/pull/11565
function fixLineBreak(line: Token[]) {
const singleLineBreakToken =
line.length === 1 && line[0]!.content === '\n' ? line[0] : undefined;
if (singleLineBreakToken) {
return [{...singleLineBreakToken, content: ''}];
}
return line;
}
@ -35,7 +42,6 @@ export default function CodeBlockLine({
getTokenProps,
}: Props): ReactNode {
const line = fixLineBreak(lineProp);
const lineProps = getLineProps({
line,
className: clsx(classNames, showLineNumbers && styles.codeLine),
@ -51,7 +57,7 @@ export default function CodeBlockLine({
});
return (
<span {...lineProps}>
<div {...lineProps}>
{showLineNumbers ? (
<>
<span className={styles.codeLineNumber} />
@ -60,7 +66,7 @@ export default function CodeBlockLine({
) : (
lineTokens
)}
<br />
</span>
<LineBreak />
</div>
);
}

View File

@ -64,7 +64,7 @@ export default function DropdownNavbarItemDesktop({
{...props}
onClick={props.to ? undefined : (e) => e.preventDefault()}
onKeyDown={(e) => {
if (e.key === 'Enter') {
if (e.key === 'Enter' || e.key === ' ') {
e.preventDefault();
setShowDropdown(!showDropdown);
}

View File

@ -49,18 +49,6 @@ export default async function themeMermaid(): Promise<Plugin<void>> {
),
}),
],
// Workaround for weird Rspack/SWC issue
// See https://github.com/facebook/docusaurus/issues/11430
resolve: {
alias: {
...(elkLayoutEnabled
? {}
: {
'@mermaid-js/layout-elk': false,
}),
},
},
};
},
};

View File

@ -33,7 +33,7 @@
"copy:watch": "node ../../admin/scripts/copyUntypedFiles.js --watch"
},
"dependencies": {
"@docsearch/react": "^3.9.0 || ^4.1.0",
"@docsearch/react": "^3.9.0 || ^4.3.2",
"@docusaurus/core": "3.9.2",
"@docusaurus/logger": "3.9.2",
"@docusaurus/plugin-content-docs": "3.9.2",

View File

@ -436,5 +436,95 @@ describe('validateThemeConfig', () => {
});
});
});
describe('Ask AI suggestedQuestions', () => {
it('accepts suggestedQuestions as true', () => {
const algolia = {
appId: 'BH4D9OD16A',
indexName: 'index',
apiKey: 'apiKey',
askAi: {
assistantId: 'my-assistant-id',
suggestedQuestions: true,
},
} satisfies AlgoliaInput;
expect(testValidateThemeConfig(algolia)).toEqual({
algolia: {
...DEFAULT_CONFIG,
...algolia,
askAi: {
indexName: algolia.indexName,
apiKey: algolia.apiKey,
appId: algolia.appId,
assistantId: 'my-assistant-id',
suggestedQuestions: true,
},
},
});
});
it('accepts suggestedQuestions as false', () => {
const algolia = {
appId: 'BH4D9OD16A',
indexName: 'index',
apiKey: 'apiKey',
askAi: {
assistantId: 'my-assistant-id',
suggestedQuestions: false,
},
} satisfies AlgoliaInput;
expect(testValidateThemeConfig(algolia)).toEqual({
algolia: {
...DEFAULT_CONFIG,
...algolia,
askAi: {
indexName: algolia.indexName,
apiKey: algolia.apiKey,
appId: algolia.appId,
assistantId: 'my-assistant-id',
suggestedQuestions: false,
},
},
});
});
it('rejects invalid suggestedQuestions type', () => {
const algolia: AlgoliaInput = {
appId: 'BH4D9OD16A',
indexName: 'index',
apiKey: 'apiKey',
askAi: {
assistantId: 'my-assistant-id',
// @ts-expect-error: expected type error
suggestedQuestions: 'invalid-string',
},
};
expect(() =>
testValidateThemeConfig(algolia),
).toThrowErrorMatchingInlineSnapshot(
`""algolia.askAi.suggestedQuestions" must be a boolean"`,
);
});
it('rejects suggestedQuestions as number', () => {
const algolia: AlgoliaInput = {
appId: 'BH4D9OD16A',
indexName: 'index',
apiKey: 'apiKey',
askAi: {
assistantId: 'my-assistant-id',
// @ts-expect-error: expected type error
suggestedQuestions: 123,
},
};
expect(() =>
testValidateThemeConfig(algolia),
).toThrowErrorMatchingInlineSnapshot(
`""algolia.askAi.suggestedQuestions" must be a boolean"`,
);
});
});
});
});

View File

@ -91,7 +91,7 @@ export function useAlgoliaAskAi(props: DocSearchV4PropsLite): UseAskAiResult {
}, []);
const extraAskAiProps: UseAskAiResult['extraAskAiProps'] = {
askAi,
askAi: askAi as any,
canHandleAskAi,
isAskAiActive,
onAskAiToggle,

View File

@ -43,7 +43,7 @@ export function useSearchResultUrlProcessor(): (url: string) => string {
}
// Otherwise => transform to relative URL for SPA navigation
const relativeUrl = `${parsedURL.pathname + parsedURL.hash}`;
const relativeUrl = `${parsedURL.pathname}${parsedURL.search}${parsedURL.hash}`;
return withBaseUrl(
replacePathname(relativeUrl, replaceSearchResultPathname),

View File

@ -17,6 +17,7 @@ declare module '@docusaurus/theme-search-algolia' {
import type {FacetFilters} from 'algoliasearch/lite';
// The config after normalization (e.g. AskAI string -> object)
// This matches DocSearch v4.3+ AskAi configuration
export type AskAiConfig = {
indexName: string;
apiKey: string;
@ -25,6 +26,7 @@ declare module '@docusaurus/theme-search-algolia' {
searchParameters?: {
facetFilters?: FacetFilters;
};
suggestedQuestions?: boolean;
};
// DocSearch props that Docusaurus exposes directly through props forwarding

View File

@ -61,7 +61,7 @@ type DocSearchProps = Omit<
// extend DocSearchProps for v4 features
// TODO Docusaurus v4: cleanup after we drop support for DocSearch v3
interface DocSearchV4Props extends DocSearchProps {
interface DocSearchV4Props extends Omit<DocSearchProps, 'askAi'> {
indexName: string;
askAi?: ThemeConfigAlgolia['askAi'];
translations?: DocSearchTranslations;
@ -199,7 +199,7 @@ function useSearchParameters({
function DocSearch({externalUrlRegex, ...props}: DocSearchV4Props) {
const navigator = useNavigator({externalUrlRegex});
const searchParameters = useSearchParameters({...props});
const searchParameters = useSearchParameters({...props} as DocSearchProps);
const transformItems = useTransformItems(props);
const transformSearchClient = useTransformSearchClient();
@ -301,7 +301,7 @@ function DocSearch({externalUrlRegex, ...props}: DocSearchV4Props) {
resultsFooterComponent,
})}
placeholder={currentPlaceholder}
{...props}
{...(props as any)}
translations={props.translations?.modal ?? translations.modal}
searchParameters={searchParameters}
{...extraAskAiProps}
@ -312,9 +312,15 @@ function DocSearch({externalUrlRegex, ...props}: DocSearchV4Props) {
);
}
export default function SearchBar(): ReactNode {
export default function SearchBar(props: Partial<DocSearchV4Props>): ReactNode {
const {siteConfig} = useDocusaurusContext();
return (
<DocSearch {...(siteConfig.themeConfig.algolia as DocSearchV4Props)} />
);
const docSearchProps: DocSearchV4Props = {
...(siteConfig.themeConfig.algolia as DocSearchV4Props),
// Let props override theme config
// See https://github.com/facebook/docusaurus/pull/11581
...props,
};
return <DocSearch {...docSearchProps} />;
}

View File

@ -75,6 +75,7 @@ export const Schema = Joi.object<ThemeConfig>({
searchParameters: Joi.object({
facetFilters: FacetFiltersSchema.optional(),
}).optional(),
suggestedQuestions: Joi.boolean().optional(),
}),
)
.custom(

View File

@ -1,60 +1,60 @@
{
"theme.SearchBar.label": "Buscar",
"theme.SearchBar.label": "Procurar",
"theme.SearchBar.seeAll": "Ver todos os {count} resultados",
"theme.SearchModal.askAiScreen.afterToolCallText": "Searched for",
"theme.SearchModal.askAiScreen.copyButtonCopiedText": "Copied!",
"theme.SearchModal.askAiScreen.copyButtonText": "Copy",
"theme.SearchModal.askAiScreen.copyButtonTitle": "Copy",
"theme.SearchModal.askAiScreen.disclaimerText": "Answers are generated with AI which can make mistakes. Verify responses.",
"theme.SearchModal.askAiScreen.afterToolCallText": "Procurou por",
"theme.SearchModal.askAiScreen.copyButtonCopiedText": "Copiado!",
"theme.SearchModal.askAiScreen.copyButtonText": "Copiar",
"theme.SearchModal.askAiScreen.copyButtonTitle": "Copiar",
"theme.SearchModal.askAiScreen.disclaimerText": "Respostas geradas por IA podem cometer erros. Verifique.",
"theme.SearchModal.askAiScreen.dislikeButtonTitle": "Dislike",
"theme.SearchModal.askAiScreen.duringToolCallText": "Searching for ",
"theme.SearchModal.askAiScreen.duringToolCallText": "Procurando por ",
"theme.SearchModal.askAiScreen.likeButtonTitle": "Like",
"theme.SearchModal.askAiScreen.preToolCallText": "Searching...",
"theme.SearchModal.askAiScreen.relatedSourcesText": "Related sources",
"theme.SearchModal.askAiScreen.thanksForFeedbackText": "Thanks for your feedback!",
"theme.SearchModal.askAiScreen.thinkingText": "Thinking...",
"theme.SearchModal.askAiScreen.preToolCallText": "Procurando...",
"theme.SearchModal.askAiScreen.relatedSourcesText": "Resultados relacionados",
"theme.SearchModal.askAiScreen.thanksForFeedbackText": "Obrigado pelo seu feedback!",
"theme.SearchModal.askAiScreen.thinkingText": "Pensando...",
"theme.SearchModal.errorScreen.helpText": "Talvez você deva verificar sua conexão de rede.",
"theme.SearchModal.errorScreen.titleText": "Não foi possível obter resultados",
"theme.SearchModal.footer.backToSearchText": "Back to search",
"theme.SearchModal.footer.backToSearchText": "Voltar para pesquisa",
"theme.SearchModal.footer.closeKeyAriaLabel": "Tecla Esc",
"theme.SearchModal.footer.closeText": "fechar",
"theme.SearchModal.footer.navigateDownKeyAriaLabel": "Seta para baixo",
"theme.SearchModal.footer.navigateText": "navegar",
"theme.SearchModal.footer.navigateUpKeyAriaLabel": "Seta para cima",
"theme.SearchModal.footer.searchByText": "Esta busca utiliza",
"theme.SearchModal.footer.searchByText": "Esta pesquisa utiliza",
"theme.SearchModal.footer.selectKeyAriaLabel": "Tecla Enter",
"theme.SearchModal.footer.selectText": "selecionar",
"theme.SearchModal.footer.submitQuestionText": "Submit question",
"theme.SearchModal.footer.submitQuestionText": "Enviar pergunta",
"theme.SearchModal.noResultsScreen.noResultsText": "Nenhum resultado para",
"theme.SearchModal.noResultsScreen.reportMissingResultsLinkText": "Nos avise.",
"theme.SearchModal.noResultsScreen.reportMissingResultsText": "Você acha que esta busca deveria retornar resultados?",
"theme.SearchModal.noResultsScreen.suggestedQueryText": "Tente buscar por",
"theme.SearchModal.placeholder": "Buscar documentos",
"theme.SearchModal.resultsScreen.askAiPlaceholder": "Ask AI: ",
"theme.SearchModal.searchBox.backToKeywordSearchButtonAriaLabel": "Back to keyword search",
"theme.SearchModal.searchBox.backToKeywordSearchButtonText": "Back to keyword search",
"theme.SearchModal.noResultsScreen.reportMissingResultsText": "Você acha que esta pesquisa deveria retornar resultados?",
"theme.SearchModal.noResultsScreen.suggestedQueryText": "Tente procurar por",
"theme.SearchModal.placeholder": "Procurar documentos",
"theme.SearchModal.resultsScreen.askAiPlaceholder": "Pergunte para a IA: ",
"theme.SearchModal.searchBox.backToKeywordSearchButtonAriaLabel": "Voltar para a pesquisa por palavra-chave",
"theme.SearchModal.searchBox.backToKeywordSearchButtonText": "Voltar para a pesquisa por palavra-chave",
"theme.SearchModal.searchBox.cancelButtonText": "Cancelar",
"theme.SearchModal.searchBox.enterKeyHint": "search",
"theme.SearchModal.searchBox.enterKeyHint": "procurar",
"theme.SearchModal.searchBox.enterKeyHintAskAi": "enter",
"theme.SearchModal.searchBox.placeholderText": "Search docs",
"theme.SearchModal.searchBox.placeholderTextAskAi": "Ask another question...",
"theme.SearchModal.searchBox.placeholderTextAskAiStreaming": "Answering...",
"theme.SearchModal.searchBox.resetButtonTitle": "Limpar a busca",
"theme.SearchModal.searchBox.searchInputLabel": "Search",
"theme.SearchModal.searchBox.placeholderText": "Procurar na documentação",
"theme.SearchModal.searchBox.placeholderTextAskAi": "Pergunte outra coisa...",
"theme.SearchModal.searchBox.placeholderTextAskAiStreaming": "Respondendo...",
"theme.SearchModal.searchBox.resetButtonTitle": "Limpar a pesquisa",
"theme.SearchModal.searchBox.searchInputLabel": "Procurar",
"theme.SearchModal.startScreen.favoriteSearchesTitle": "Favorito",
"theme.SearchModal.startScreen.noRecentSearchesText": "Nenhuma busca recente",
"theme.SearchModal.startScreen.recentConversationsTitle": "Recent conversations",
"theme.SearchModal.startScreen.noRecentSearchesText": "Nenhuma pesquisa recente",
"theme.SearchModal.startScreen.recentConversationsTitle": "Conversas recentes",
"theme.SearchModal.startScreen.recentSearchesTitle": "Recente",
"theme.SearchModal.startScreen.removeFavoriteSearchButtonTitle": "Remover esta busca dos favoritos",
"theme.SearchModal.startScreen.removeFavoriteSearchButtonTitle": "Remover esta pesquisa dos favoritos",
"theme.SearchModal.startScreen.removeRecentConversationButtonTitle": "Remove this conversation from history",
"theme.SearchModal.startScreen.removeRecentSearchButtonTitle": "Remover esta busca do histórico",
"theme.SearchModal.startScreen.saveRecentSearchButtonTitle": "Salvar esta busca",
"theme.SearchModal.startScreen.removeRecentSearchButtonTitle": "Remover esta pesquisa do histórico",
"theme.SearchModal.startScreen.saveRecentSearchButtonTitle": "Salvar esta pesquisa",
"theme.SearchPage.algoliaLabel": "Desenvolvido por Algolia",
"theme.SearchPage.documentsFound.plurals": "Um documento encontrado|{count} documentos encontrados",
"theme.SearchPage.emptyResultsTitle": "Busca da documentação",
"theme.SearchPage.existingResultsTitle": "Resultado da busca por \"{query}\"",
"theme.SearchPage.emptyResultsTitle": "Pesquisa da documentação",
"theme.SearchPage.existingResultsTitle": "Resultado da pesquisa por \"{query}\"",
"theme.SearchPage.fetchingNewResults": "Trazendo novos resultados...",
"theme.SearchPage.inputLabel": "Buscar",
"theme.SearchPage.inputPlaceholder": "Digite sua busca aqui",
"theme.SearchPage.inputLabel": "Procurar",
"theme.SearchPage.inputPlaceholder": "Digite sua pesquisa aqui",
"theme.SearchPage.noResultsText": "Nenhum resultado foi encontrado"
}

View File

@ -33,6 +33,7 @@ export type FasterConfig = {
rspackBundler: boolean;
rspackPersistentCache: boolean;
ssgWorkerThreads: boolean;
gitEagerVcs: boolean;
};
export type FutureV4Config = {
@ -40,6 +41,53 @@ export type FutureV4Config = {
useCssCascadeLayers: boolean;
};
// VCS (Version Control System) info about a given change, e.g., a git commit.
// The agnostic term "VCS" is used instead of "git" to acknowledge the existence
// of other version control systems, and external systems like CMSs and i18n
// translation SaaS (e.g., Crowdin)
export type VcsChangeInfo = {timestamp: number; author: string};
export type VscInitializeParams = {
siteDir: string;
// TODO could it be useful to provide all plugins getPathsToWatch() here?
// this could give the opportunity to find out all VCS roots ahead of times
// this is mostly useful for multi-git-repo setups, can be added later
};
// VCS (Version Control System) config hooks to get file change info.
// This lets you override and customize the default Docusaurus behavior.
// This can be useful to optimize calls or when using something else than git
// See https://github.com/facebook/docusaurus/issues/11208
// See https://github.com/e18e/ecosystem-issues/issues/216
export type VcsConfig = {
/**
* Initialize the VCS system.
* This is notably useful to pre-read eagerly a full Git repository so that
* all the files first/last update info can be retrieved efficiently later
*
* Note: for now, this function is synchronous on purpose, it can be used to
* start warming up the VCS by reading eagerly, but we don't want to delay
* the rest of the Docusaurus start/build process. Instead of awaiting the
* init promise, you can create/store it and await it later during reads.
*
* @param params Initialization params that can be useful to warm up the VCS
*/
initialize: (params: VscInitializeParams) => void;
getFileCreationInfo: (filePath: string) => Promise<VcsChangeInfo | null>;
getFileLastUpdateInfo: (filePath: string) => Promise<VcsChangeInfo | null>;
};
/**
* List of pre-built VcsConfig that Docusaurus provides.
*/
export type VcsPreset =
| 'git-ad-hoc'
| 'git-eager'
| 'hardcoded'
| 'disabled'
| 'default-v1'
| 'default-v2';
export type FutureConfig = {
/**
* Turns v4 future flags on
@ -50,6 +98,8 @@ export type FutureConfig = {
experimental_storage: StorageConfig;
experimental_vcs: VcsConfig;
/**
* Docusaurus can work with 2 router types.
*
@ -367,6 +417,7 @@ export type Config = Overwrite<
{
v4?: boolean | Partial<FutureV4Config>;
experimental_faster?: boolean | Partial<FasterConfig>;
experimental_vcs?: VcsPreset | VcsConfig | boolean;
}
>;
}

View File

@ -13,6 +13,10 @@ export {
FutureV4Config,
FasterConfig,
StorageConfig,
VcsConfig,
VcsPreset,
VcsChangeInfo,
VscInitializeParams,
Config,
} from './config';

View File

@ -104,6 +104,8 @@ export type HtmlTagObject = {
tagName: string;
/** The inner HTML */
innerHTML?: string;
/** Allow custom html elements, e.g. `<custom-element>` */
customElement?: boolean;
};
export type HtmlTags = string | HtmlTagObject | (string | HtmlTagObject)[];

View File

@ -22,7 +22,7 @@
"@docusaurus/types": "3.9.2",
"@docusaurus/utils-common": "3.9.2",
"escape-string-regexp": "^4.0.0",
"execa": "5.1.1",
"execa": "^5.1.1",
"file-loader": "^6.2.0",
"fs-extra": "^11.1.1",
"github-slugger": "^1.5.0",

View File

@ -1 +0,0 @@
# Hoo hoo, if this path tricks you...

View File

@ -1,7 +0,0 @@
---
id: hello
title: Hello, World !
slug: /
---
Hello

View File

@ -1,159 +0,0 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import fs from 'fs-extra';
import path from 'path';
import {createTempRepo} from '@testing-utils/git';
import {FileNotTrackedError, getFileCommitDate} from '../gitUtils';
import {getGitLastUpdate} from '../lastUpdateUtils';
/* eslint-disable no-restricted-properties */
function initializeTempRepo() {
const {repoDir, git} = createTempRepo();
fs.writeFileSync(path.join(repoDir, 'test.txt'), 'Some content');
git.commit(
'Create test.txt',
'2020-06-19',
'Caroline <caroline@jc-verse.com>',
);
fs.writeFileSync(path.join(repoDir, 'test.txt'), 'Updated content');
git.commit(
'Update test.txt',
'2020-06-20',
'Josh-Cena <josh-cena@jc-verse.com>',
);
fs.writeFileSync(path.join(repoDir, 'test.txt'), 'Updated content (2)');
fs.writeFileSync(path.join(repoDir, 'moved.txt'), 'This file is moved');
git.commit(
'Update test.txt again, create moved.txt',
'2020-09-13',
'Caroline <caroline@jc-verse.com>',
);
fs.moveSync(path.join(repoDir, 'moved.txt'), path.join(repoDir, 'dest.txt'));
git.commit(
'Rename moved.txt to dest.txt',
'2020-11-13',
'Josh-Cena <josh-cena@jc-verse.com>',
);
fs.writeFileSync(path.join(repoDir, 'untracked.txt'), "I'm untracked");
return repoDir;
}
describe('getFileCommitDate', () => {
const repoDir = initializeTempRepo();
it('returns earliest commit date', async () => {
await expect(
getFileCommitDate(path.join(repoDir, 'test.txt'), {}),
).resolves.toEqual({
date: new Date('2020-06-19'),
timestamp: new Date('2020-06-19').getTime(),
});
await expect(
getFileCommitDate(path.join(repoDir, 'dest.txt'), {}),
).resolves.toEqual({
date: new Date('2020-09-13'),
timestamp: new Date('2020-09-13').getTime(),
});
});
it('returns latest commit date', async () => {
await expect(
getFileCommitDate(path.join(repoDir, 'test.txt'), {age: 'newest'}),
).resolves.toEqual({
date: new Date('2020-09-13'),
timestamp: new Date('2020-09-13').getTime(),
});
await expect(
getFileCommitDate(path.join(repoDir, 'dest.txt'), {age: 'newest'}),
).resolves.toEqual({
date: new Date('2020-11-13'),
timestamp: new Date('2020-11-13').getTime(),
});
});
it('returns latest commit date with author', async () => {
await expect(
getFileCommitDate(path.join(repoDir, 'test.txt'), {
age: 'oldest',
includeAuthor: true,
}),
).resolves.toEqual({
date: new Date('2020-06-19'),
timestamp: new Date('2020-06-19').getTime(),
author: 'Caroline',
});
await expect(
getFileCommitDate(path.join(repoDir, 'dest.txt'), {
age: 'oldest',
includeAuthor: true,
}),
).resolves.toEqual({
date: new Date('2020-09-13'),
timestamp: new Date('2020-09-13').getTime(),
author: 'Caroline',
});
});
it('returns earliest commit date with author', async () => {
await expect(
getFileCommitDate(path.join(repoDir, 'test.txt'), {
age: 'newest',
includeAuthor: true,
}),
).resolves.toEqual({
date: new Date('2020-09-13'),
timestamp: new Date('2020-09-13').getTime(),
author: 'Caroline',
});
await expect(
getFileCommitDate(path.join(repoDir, 'dest.txt'), {
age: 'newest',
includeAuthor: true,
}),
).resolves.toEqual({
date: new Date('2020-11-13'),
timestamp: new Date('2020-11-13').getTime(),
author: 'Josh-Cena',
});
});
it('throws custom error when file is not tracked', async () => {
await expect(() =>
getFileCommitDate(path.join(repoDir, 'untracked.txt'), {
age: 'newest',
includeAuthor: true,
}),
).rejects.toThrow(FileNotTrackedError);
});
it('throws when file not found', async () => {
await expect(() =>
getFileCommitDate(path.join(repoDir, 'nonexistent.txt'), {
age: 'newest',
includeAuthor: true,
}),
).rejects.toThrow(
/Failed to retrieve git history for ".*nonexistent.txt" because the file does not exist./,
);
});
it('multiple files not tracked by git', async () => {
const consoleMock = jest
.spyOn(console, 'warn')
.mockImplementation(() => {});
const tempFilePath1 = path.join(repoDir, 'file1.md');
const tempFilePath2 = path.join(repoDir, 'file2.md');
await fs.writeFile(tempFilePath1, 'Lorem ipsum :)');
await fs.writeFile(tempFilePath2, 'Lorem ipsum :)');
// TODO this is not the correct place to test "getGitLastUpdate"
await expect(getGitLastUpdate(tempFilePath1)).resolves.toBeNull();
await expect(getGitLastUpdate(tempFilePath2)).resolves.toBeNull();
expect(consoleMock).toHaveBeenCalledTimes(1);
expect(consoleMock).toHaveBeenLastCalledWith(
expect.stringMatching(/not tracked by git./),
);
await fs.unlink(tempFilePath1);
await fs.unlink(tempFilePath2);
});
});

View File

@ -5,162 +5,85 @@
* LICENSE file in the root directory of this source tree.
*/
import {jest} from '@jest/globals';
import fs from 'fs-extra';
import path from 'path';
import {createTempRepo} from '@testing-utils/git';
import execa from 'execa';
import {readLastUpdateData} from '../lastUpdateUtils';
import {TEST_VCS} from '../vcs/vcs';
import {
getGitLastUpdate,
LAST_UPDATE_FALLBACK,
LAST_UPDATE_UNTRACKED_GIT_FILEPATH,
readLastUpdateData,
} from '../lastUpdateUtils';
import type {FrontMatterLastUpdate} from '../lastUpdateUtils';
describe('getGitLastUpdate', () => {
const {repoDir} = createTempRepo();
const existingFilePath = path.join(
__dirname,
'__fixtures__/simple-site/hello.md',
);
it('existing test file in repository with Git timestamp', async () => {
const lastUpdateData = await getGitLastUpdate(existingFilePath);
expect(lastUpdateData).not.toBeNull();
const {lastUpdatedAt, lastUpdatedBy} = lastUpdateData!;
expect(lastUpdatedBy).not.toBeNull();
expect(typeof lastUpdatedBy).toBe('string');
expect(lastUpdatedAt).not.toBeNull();
expect(typeof lastUpdatedAt).toBe('number');
});
it('existing test file with spaces in path', async () => {
const filePathWithSpace = path.join(
__dirname,
'__fixtures__/simple-site/doc with space.md',
);
const lastUpdateData = await getGitLastUpdate(filePathWithSpace);
expect(lastUpdateData).not.toBeNull();
const {lastUpdatedBy, lastUpdatedAt} = lastUpdateData!;
expect(lastUpdatedBy).not.toBeNull();
expect(typeof lastUpdatedBy).toBe('string');
expect(lastUpdatedAt).not.toBeNull();
expect(typeof lastUpdatedAt).toBe('number');
});
it('non-existing file', async () => {
const consoleMock = jest
.spyOn(console, 'warn')
.mockImplementation(() => {});
const nonExistingFileName = '.nonExisting';
const nonExistingFilePath = path.join(
__dirname,
'__fixtures__',
nonExistingFileName,
);
await expect(getGitLastUpdate(nonExistingFilePath)).rejects.toThrow(
/An error occurred when trying to get the last update date/,
);
expect(consoleMock).toHaveBeenCalledTimes(0);
consoleMock.mockRestore();
});
it('git does not exist', async () => {
const mock = jest.spyOn(execa, 'sync').mockImplementationOnce(() => {
throw new Error('Git does not exist');
});
const consoleMock = jest
.spyOn(console, 'warn')
.mockImplementation(() => {});
const lastUpdateData = await getGitLastUpdate(existingFilePath);
expect(lastUpdateData).toBeNull();
expect(consoleMock).toHaveBeenLastCalledWith(
expect.stringMatching(
/.*\[WARNING\].* Sorry, the last update options require Git\..*/,
),
);
consoleMock.mockRestore();
mock.mockRestore();
});
it('temporary created file that is not tracked by git', async () => {
const consoleMock = jest
.spyOn(console, 'warn')
.mockImplementation(() => {});
const tempFilePath = path.join(repoDir, 'file.md');
await fs.writeFile(tempFilePath, 'Lorem ipsum :)');
await expect(getGitLastUpdate(tempFilePath)).resolves.toBeNull();
expect(consoleMock).toHaveBeenCalledTimes(1);
expect(consoleMock).toHaveBeenLastCalledWith(
expect.stringMatching(/not tracked by git./),
);
await fs.unlink(tempFilePath);
});
});
describe('readLastUpdateData', () => {
const testDate = '2021-01-01';
const testTimestamp = new Date(testDate).getTime();
const testAuthor = 'ozaki';
async function readData(
filePath: string,
options: Parameters<typeof readLastUpdateData>[1],
lastUpdateFrontMatter: Parameters<typeof readLastUpdateData>[2],
) {
return readLastUpdateData(
filePath,
options,
lastUpdateFrontMatter,
TEST_VCS,
);
}
describe('on untracked Git file', () => {
function test(lastUpdateFrontMatter: FrontMatterLastUpdate | undefined) {
return readLastUpdateData(
LAST_UPDATE_UNTRACKED_GIT_FILEPATH,
function readUntrackedFile(
lastUpdateFrontMatter: FrontMatterLastUpdate | undefined,
) {
return readData(
TEST_VCS.UNTRACKED_FILE_PATH,
{showLastUpdateAuthor: true, showLastUpdateTime: true},
lastUpdateFrontMatter,
);
}
it('reads null at/by from Git', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await test({});
const {lastUpdatedAt, lastUpdatedBy} = await readUntrackedFile({});
expect(lastUpdatedAt).toBeNull();
expect(lastUpdatedBy).toBeNull();
});
it('reads null at from Git and author from front matter', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await test({author: testAuthor});
const {lastUpdatedAt, lastUpdatedBy} = await readUntrackedFile({
author: testAuthor,
});
expect(lastUpdatedAt).toBeNull();
expect(lastUpdatedBy).toEqual(testAuthor);
});
it('reads null by from Git and date from front matter', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await test({date: testDate});
const {lastUpdatedAt, lastUpdatedBy} = await readUntrackedFile({
date: testDate,
});
expect(lastUpdatedBy).toBeNull();
expect(lastUpdatedAt).toEqual(testTimestamp);
});
});
it('read last time show author time', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: true},
{date: testDate},
);
expect(lastUpdatedAt).toEqual(testTimestamp);
expect(lastUpdatedBy).toBe(LAST_UPDATE_FALLBACK.lastUpdatedBy);
expect(lastUpdatedBy).toBe(TEST_VCS.LAST_UPDATE_INFO.author);
});
it('read last author show author time', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: true},
{author: testAuthor},
);
expect(lastUpdatedBy).toEqual(testAuthor);
expect(lastUpdatedAt).toBe(LAST_UPDATE_FALLBACK.lastUpdatedAt);
expect(lastUpdatedAt).toBe(TEST_VCS.LAST_UPDATE_INFO.timestamp);
});
it('read last all show author time', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: true},
{author: testAuthor, date: testDate},
@ -170,7 +93,7 @@ describe('readLastUpdateData', () => {
});
it('read last default show none', async () => {
const lastUpdate = await readLastUpdateData(
const lastUpdate = await readData(
'',
{showLastUpdateAuthor: false, showLastUpdateTime: false},
{},
@ -179,7 +102,7 @@ describe('readLastUpdateData', () => {
});
it('read last author show none', async () => {
const lastUpdate = await readLastUpdateData(
const lastUpdate = await readData(
'',
{showLastUpdateAuthor: false, showLastUpdateTime: false},
{author: testAuthor},
@ -188,17 +111,17 @@ describe('readLastUpdateData', () => {
});
it('read last time show author', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: false},
{date: testDate},
);
expect(lastUpdatedBy).toBe(LAST_UPDATE_FALLBACK.lastUpdatedBy);
expect(lastUpdatedBy).toBe(TEST_VCS.LAST_UPDATE_INFO.author);
expect(lastUpdatedAt).toBeUndefined();
});
it('read last author show author', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: false},
{author: testAuthor},
@ -208,17 +131,17 @@ describe('readLastUpdateData', () => {
});
it('read last default show author default', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: false},
{},
);
expect(lastUpdatedBy).toBe(LAST_UPDATE_FALLBACK.lastUpdatedBy);
expect(lastUpdatedBy).toBe(TEST_VCS.LAST_UPDATE_INFO.author);
expect(lastUpdatedAt).toBeUndefined();
});
it('read last time show time', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: false, showLastUpdateTime: true},
{date: testDate},
@ -228,17 +151,17 @@ describe('readLastUpdateData', () => {
});
it('read last author show time', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: false, showLastUpdateTime: true},
{author: testAuthor},
);
expect(lastUpdatedBy).toBeUndefined();
expect(lastUpdatedAt).toEqual(LAST_UPDATE_FALLBACK.lastUpdatedAt);
expect(lastUpdatedAt).toEqual(TEST_VCS.LAST_UPDATE_INFO.timestamp);
});
it('read last author show time only - both front matter', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: false, showLastUpdateTime: true},
{author: testAuthor, date: testDate},
@ -248,7 +171,7 @@ describe('readLastUpdateData', () => {
});
it('read last author show author only - both front matter', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: false},
{author: testAuthor, date: testDate},

View File

@ -1,200 +0,0 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import path from 'path';
import fs from 'fs-extra';
import os from 'os';
import _ from 'lodash';
import execa from 'execa';
import PQueue from 'p-queue';
// Quite high/conservative concurrency value (it was previously "Infinity")
// See https://github.com/facebook/docusaurus/pull/10915
// TODO Docusaurus v4: bump node, availableParallelism() now always exists
const DefaultGitCommandConcurrency = (() => {
  const cores =
    typeof os.availableParallelism === 'function'
      ? os.availableParallelism()
      : os.cpus().length;
  return cores * 4;
})();

// Optional user override, read from the environment.
const GitCommandConcurrencyEnv = (() => {
  const raw = process.env.DOCUSAURUS_GIT_COMMAND_CONCURRENCY;
  return raw ? parseInt(raw, 10) : undefined;
})();

// Only honor a strictly positive override (NaN/0/negative => default).
const GitCommandConcurrency =
  GitCommandConcurrencyEnv !== undefined && GitCommandConcurrencyEnv > 0
    ? GitCommandConcurrencyEnv
    : DefaultGitCommandConcurrency;
// We use a queue to avoid running too many concurrent Git commands at once
// See https://github.com/facebook/docusaurus/issues/10348
// (p-queue caps how many of the queued async tasks run simultaneously)
const GitCommandQueue = new PQueue({
  concurrency: GitCommandConcurrency,
});
// Returns true when a working `git` binary is reachable from PATH.
const realHasGitFn = () => {
  try {
    const {exitCode} = execa.sync('git', ['--version']);
    return exitCode === 0;
  } catch {
    return false;
  }
};
// The hasGit call is synchronous IO so we memoize it
// The user won't install Git in the middle of a build anyway...
const hasGit =
  process.env.NODE_ENV === 'test'
    ? realHasGitFn
    : _.memoize(realHasGitFn);
/**
 * Custom error thrown when git is not found in `PATH`.
 * Lets callers distinguish "git not installed" from real git failures.
 */
export class GitNotFoundError extends Error {}
/**
 * Custom error thrown when the current file is not tracked by git.
 * Lets callers distinguish "untracked file" from real git failures.
 */
export class FileNotTrackedError extends Error {}
/**
* Fetches the git history of a file and returns a relevant commit date.
* It gets the commit date instead of author date so that amended commits
* can have their dates updated.
*
* @throws {@link GitNotFoundError} If git is not found in `PATH`.
* @throws {@link FileNotTrackedError} If the current file is not tracked by git.
* @throws Also throws when `git log` exited with non-zero, or when it outputs
* unexpected text.
*/
export async function getFileCommitDate(
/** Absolute path to the file. */
file: string,
args: {
/**
* `"oldest"` is the commit that added the file, following renames;
* `"newest"` is the last commit that edited the file.
*/
age?: 'oldest' | 'newest';
/** Use `includeAuthor: true` to get the author information as well. */
includeAuthor?: false;
},
): Promise<{
/** Relevant commit date. */
date: Date;
/** Timestamp returned from git, converted to **milliseconds**. */
timestamp: number;
}>;
/**
* Fetches the git history of a file and returns a relevant commit date.
* It gets the commit date instead of author date so that amended commits
* can have their dates updated.
*
* @throws {@link GitNotFoundError} If git is not found in `PATH`.
* @throws {@link FileNotTrackedError} If the current file is not tracked by git.
* @throws Also throws when `git log` exited with non-zero, or when it outputs
* unexpected text.
*/
export async function getFileCommitDate(
/** Absolute path to the file. */
file: string,
args: {
/**
* `"oldest"` is the commit that added the file, following renames;
* `"newest"` is the last commit that edited the file.
*/
age?: 'oldest' | 'newest';
includeAuthor: true;
},
): Promise<{
/** Relevant commit date. */
date: Date;
/** Timestamp returned from git, converted to **milliseconds**. */
timestamp: number;
/** The author's name, as returned from git. */
author: string;
}>;
// Implementation shared by both overload signatures above.
export async function getFileCommitDate(
  file: string,
  {
    age = 'oldest',
    includeAuthor = false,
  }: {
    age?: 'oldest' | 'newest';
    includeAuthor?: boolean;
  },
): Promise<{
  date: Date;
  timestamp: number;
  author?: string;
}> {
  // Fail fast with a dedicated error type when git is not installed.
  if (!hasGit()) {
    throw new GitNotFoundError(
      `Failed to retrieve git history for "${file}" because git is not installed.`,
    );
  }
  if (!(await fs.pathExists(file))) {
    throw new Error(
      `Failed to retrieve git history for "${file}" because the file does not exist.`,
    );
  }
  // We add a "RESULT:" prefix to make parsing easier
  // See why: https://github.com/facebook/docusaurus/pull/10022
  // %ct = committer timestamp (seconds), %an = author name
  const resultFormat = includeAuthor ? 'RESULT:%ct,%an' : 'RESULT:%ct';
  const args = [
    `--format=${resultFormat}`,
    '--max-count=1',
    // "oldest" = the commit that added the file; --follow tracks renames
    age === 'oldest' ? '--follow --diff-filter=A' : undefined,
  ]
    .filter(Boolean)
    .join(' ');
  // Run from the file's own directory and pass only its basename, so the
  // lookup works whichever repository root the file lives under.
  // NOTE(review): the basename is interpolated into a shell string; a file
  // name containing `"` or `$` would break the command — confirm acceptable.
  const command = `git -c log.showSignature=false log ${args} -- "${path.basename(
    file,
  )}"`;
  // Queued to cap the number of concurrent git child processes.
  const result = (await GitCommandQueue.add(() => {
    return execa(command, {
      cwd: path.dirname(file),
      shell: true,
    });
  }))!;
  // NOTE(review): execa rejects on non-zero exit codes by default, so this
  // branch looks unreachable — kept as a defensive check; confirm.
  if (result.exitCode !== 0) {
    throw new Error(
      `Failed to retrieve the git history for file "${file}" with exit code ${result.exitCode}: ${result.stderr}`,
    );
  }
  // We only parse the output line starting with our "RESULT:" prefix
  // See why https://github.com/facebook/docusaurus/pull/10022
  const regex = includeAuthor
    ? /(?:^|\n)RESULT:(?<timestamp>\d+),(?<author>.+)(?:$|\n)/
    : /(?:^|\n)RESULT:(?<timestamp>\d+)(?:$|\n)/;
  const output = result.stdout.trim();
  // Empty output: git log found no commit touching this path => untracked.
  if (!output) {
    throw new FileNotTrackedError(
      `Failed to retrieve the git history for file "${file}" because the file is not tracked by git.`,
    );
  }
  const match = output.match(regex);
  if (!match) {
    throw new Error(
      `Failed to retrieve the git history for file "${file}" with unexpected output: ${output}`,
    );
  }
  // git reports seconds; normalize to milliseconds for JS Date consumers.
  const timestampInSeconds = Number(match.groups!.timestamp);
  const timestamp = timestampInSeconds * 1_000;
  const date = new Date(timestamp);
  if (includeAuthor) {
    return {date, timestamp, author: match.groups!.author!};
  }
  return {date, timestamp};
}

View File

@ -25,10 +25,12 @@ export {
} from './constants';
export {generate, readOutputHTMLFile} from './emitUtils';
export {
// TODO Docusaurus v4: remove these legacy exports,
// they are only kept for retro-compatibility
getFileCommitDate,
FileNotTrackedError,
GitNotFoundError,
} from './gitUtils';
} from './vcs/gitUtils';
export {
mergeTranslations,
updateTranslationFileMessages,
@ -121,12 +123,11 @@ export {askPreferredLanguage} from './cliUtils';
export {flattenRoutes} from './routeUtils';
export {
getGitLastUpdate,
getLastUpdate,
readLastUpdateData,
LAST_UPDATE_FALLBACK,
type LastUpdateData,
type FrontMatterLastUpdate,
} from './lastUpdateUtils';
export {VcsPresetNames, getVcsPreset, TEST_VCS} from './vcs/vcs';
export {normalizeTags, reportInlineTags} from './tags';

View File

@ -6,13 +6,9 @@
*/
import _ from 'lodash';
import logger from '@docusaurus/logger';
import {
FileNotTrackedError,
GitNotFoundError,
getFileCommitDate,
} from './gitUtils';
import type {PluginOptions} from '@docusaurus/types';
import {getVcsPreset} from './vcs/vcs';
import type {PluginOptions, VcsConfig} from '@docusaurus/types';
export type LastUpdateData = {
/**
@ -29,72 +25,6 @@ export type LastUpdateData = {
lastUpdatedBy: string | undefined | null;
};
let showedGitRequirementError = false;
let showedFileNotTrackedError = false;
export async function getGitLastUpdate(
filePath: string,
): Promise<LastUpdateData | null> {
if (!filePath) {
return null;
}
// Wrap in try/catch in case the shell commands fail
// (e.g. project doesn't use Git, etc).
try {
const result = await getFileCommitDate(filePath, {
age: 'newest',
includeAuthor: true,
});
return {lastUpdatedAt: result.timestamp, lastUpdatedBy: result.author};
} catch (err) {
if (err instanceof GitNotFoundError) {
if (!showedGitRequirementError) {
logger.warn('Sorry, the last update options require Git.');
showedGitRequirementError = true;
}
} else if (err instanceof FileNotTrackedError) {
if (!showedFileNotTrackedError) {
logger.warn(
'Cannot infer the update date for some files, as they are not tracked by git.',
);
showedFileNotTrackedError = true;
}
} else {
throw new Error(
`An error occurred when trying to get the last update date`,
{cause: err},
);
}
return null;
}
}
export const LAST_UPDATE_FALLBACK: LastUpdateData = {
lastUpdatedAt: 1539502055000,
lastUpdatedBy: 'Author',
};
// Not proud of this, but convenient for tests :/
export const LAST_UPDATE_UNTRACKED_GIT_FILEPATH = `file/path/${Math.random()}.mdx`;
export async function getLastUpdate(
filePath: string,
): Promise<LastUpdateData | null> {
if (filePath === LAST_UPDATE_UNTRACKED_GIT_FILEPATH) {
return null;
}
if (
process.env.NODE_ENV !== 'production' ||
process.env.DOCUSAURUS_DISABLE_LAST_UPDATE === 'true'
) {
// Use fake data in dev/test for faster development.
return LAST_UPDATE_FALLBACK;
}
return getGitLastUpdate(filePath);
}
type LastUpdateOptions = Pick<
PluginOptions,
'showLastUpdateAuthor' | 'showLastUpdateTime'
@ -109,11 +39,21 @@ export type FrontMatterLastUpdate = {
date?: Date | string;
};
// TODO Docusaurus v4: refactor/rename, make it clear this fn is only
// for Markdown files with front matter shared by content plugin
export async function readLastUpdateData(
filePath: string,
options: LastUpdateOptions,
lastUpdateFrontMatter: FrontMatterLastUpdate | undefined,
vcsParam: Pick<VcsConfig, 'getFileLastUpdateInfo'>,
): Promise<LastUpdateData> {
// We fall back to the default VCS config at runtime on purpose
// It preserves retro-compatibility if a third-party plugin imports it
// This also ensures unit tests keep working without extra setup
// We still want to ensure type safety by requiring the VCS param
// TODO Docusaurus v4: refactor all these Git read APIs
const vcs = vcsParam ?? getVcsPreset('default-v1');
const {showLastUpdateAuthor, showLastUpdateTime} = options;
if (!showLastUpdateAuthor && !showLastUpdateTime) {
@ -128,14 +68,16 @@ export async function readLastUpdateData(
// We try to minimize git last update calls
// We call it at most once
// If all the data is provided as front matter, we do not call it
const getLastUpdateMemoized = _.memoize(() => getLastUpdate(filePath));
const getLastUpdateMemoized = _.memoize(() =>
vcs.getFileLastUpdateInfo(filePath),
);
const getLastUpdateBy = () =>
getLastUpdateMemoized().then((update) => {
// Important, see https://github.com/facebook/docusaurus/pull/11211
if (update === null) {
return null;
}
return update?.lastUpdatedBy;
return update?.author;
});
const getLastUpdateAt = () =>
getLastUpdateMemoized().then((update) => {
@ -143,7 +85,7 @@ export async function readLastUpdateData(
if (update === null) {
return null;
}
return update?.lastUpdatedAt;
return update?.timestamp;
});
const lastUpdatedBy = showLastUpdateAuthor

View File

@ -0,0 +1 @@
A site fixture with files versioned on Git.

View File

@ -0,0 +1 @@
Blog 1

View File

@ -0,0 +1,3 @@
This is a partial in file/folder starting with _:
It should be excluded by default

View File

@ -0,0 +1 @@
Doc with space in name

View File

@ -0,0 +1 @@
Doc 1

View File

@ -0,0 +1 @@
Doc 2

View File

@ -0,0 +1,723 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import fs from 'fs-extra';
import path from 'path';
import os from 'os';
import execa from 'execa';
import {
FileNotTrackedError,
getFileCommitDate,
getGitLastUpdate,
getGitCreation,
getGitRepoRoot,
getGitSuperProjectRoot,
getGitSubmodulePaths,
getGitAllRepoRoots,
getGitRepositoryFilesInfo,
} from '../gitUtils';
/**
 * Test helper wrapping git CLI commands (via execa) to script the creation
 * of throwaway repositories: init, config, commits, submodules.
 */
class Git {
  // Parameter property: `dir` is assigned automatically by the constructor,
  // so the former explicit `this.dir = dir;` assignment was redundant.
  private constructor(private dir: string) {}

  /**
   * Runs a git command in `cwd` and throws if the exit code is non-zero.
   * NOTE(review): `silent` is not a documented execa option — possibly a
   * leftover from another runner; confirm whether it can be dropped.
   */
  private static async runOptimisticGitCommand({
    cwd,
    cmd,
    args,
    options,
  }: {
    cwd: string;
    args: string[];
    cmd: string;
    options?: execa.Options;
  }): Promise<execa.ExecaReturnValue> {
    const res = await execa(cmd, args, {
      cwd,
      silent: true,
      shell: true,
      ...options,
    });
    if (res.exitCode !== 0) {
      throw new Error(
        `Git command failed with code ${res.exitCode}: ${cmd} ${args.join(
          ' ',
        )}`,
      );
    }
    return res;
  }

  /** Initializes a repo in `dir` with a test identity and an empty commit. */
  static async initializeRepo(dir: string): Promise<Git> {
    await Git.runOptimisticGitCommand({
      cmd: 'git',
      args: ['init'],
      cwd: dir,
    });
    await Git.runOptimisticGitCommand({
      cmd: 'git',
      args: ['config', 'user.email', '"test@example.com"'],
      cwd: dir,
    });
    await Git.runOptimisticGitCommand({
      cmd: 'git',
      args: ['config', 'user.name', '"Test"'],
      cwd: dir,
    });
    await Git.runOptimisticGitCommand({
      cmd: 'git',
      args: ['commit', '--allow-empty', '-m "First commit"'],
      cwd: dir,
    });
    return new Git(dir);
  }

  /**
   * Instance variant of the static runner, bound to this repo's directory.
   * `args` now defaults to `[]`: it was declared optional but forwarded to
   * the static runner which requires `string[]` (a strict-mode type error).
   */
  async runOptimisticGitCommand(
    cmd: string,
    args: string[] = [],
    options?: execa.Options,
  ): Promise<execa.ExecaReturnValue> {
    return Git.runOptimisticGitCommand({cwd: this.dir, cmd, args, options});
  }

  /** Stages a single file. */
  async add(filePath: string): Promise<void> {
    await this.runOptimisticGitCommand('git', ['add', filePath]);
  }

  /** Stages all pending changes. */
  async addAll(): Promise<void> {
    await this.runOptimisticGitCommand('git', ['add', '.']);
  }

  /**
   * Commits staged changes with deterministic author AND committer dates
   * (via GIT_COMMITTER_DATE), so timestamps are stable across test runs.
   */
  async commit(msg: string, date: string, author: string): Promise<void> {
    await this.runOptimisticGitCommand(
      `git`,
      [
        'commit',
        `-m "${msg}"`,
        `--date "${date}T00:00:00Z"`,
        `--author "${author}"`,
      ],
      {env: {GIT_COMMITTER_DATE: `${date}T00:00:00Z`}},
    );
  }

  /** Writes (or overwrites) a file and commits it in one step. */
  async commitFile(
    filePath: string,
    {
      fileContent,
      commitMessage,
      commitDate,
      commitAuthor,
    }: {
      fileContent?: string;
      commitMessage?: string;
      commitDate?: string;
      commitAuthor?: string;
    } = {},
  ): Promise<void> {
    await fs.ensureDir(path.join(this.dir, path.dirname(filePath)));
    await fs.writeFile(
      path.join(this.dir, filePath),
      fileContent ?? `Content of ${filePath}`,
    );
    await this.add(filePath);
    await this.commit(
      commitMessage ?? `Create ${filePath}`,
      commitDate ?? '2020-06-19',
      commitAuthor ?? 'Seb <seb@example.com>',
    );
  }

  /**
   * Adds a local repository as a submodule.
   * `protocol.file.allow=always` lets git accept local file-path submodules
   * (blocked by default in recent Git versions).
   * Was `return this.runOptimisticGitCommand(...)` with a `Promise<void>`
   * return type — a type error; the call is now awaited instead.
   */
  async addSubmodule(name: string, repoPath: string): Promise<void> {
    await this.runOptimisticGitCommand('git', [
      '-c protocol.file.allow=always',
      'submodule',
      'add',
      repoPath,
      name,
    ]);
  }

  /** Adds all given submodules ({name: localRepoPath}) then inits them. */
  async defineSubmodules(submodules: {[name: string]: string}): Promise<void> {
    // Sequential on purpose: git mutates the same worktree for each add.
    for (const [name, repoPath] of Object.entries(submodules)) {
      await this.addSubmodule(name, repoPath);
    }
    await this.runOptimisticGitCommand('git', [
      'submodule',
      'update',
      '--init',
      '--recursive',
    ]);
  }
}
// Creates a brand-new empty Git repository in a unique temp directory.
// The returned path has symlinks resolved (e.g. /tmp on macOS), so it can
// be compared against the paths git itself reports.
async function createGitRepoEmpty(): Promise<{repoDir: string; git: Git}> {
  const tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'git-test-repo'));
  const repoDir = await fs.realpath.native(tmpDir);
  const git = await Git.initializeRepo(repoDir);
  return {repoDir, git};
}
describe('commit info APIs', () => {
async function createGitRepoTestFixture() {
const {repoDir, git} = await createGitRepoEmpty();
await git.commitFile('test.txt', {
fileContent: 'Some content',
commitMessage: 'Create test.txt',
commitDate: '2020-06-19',
commitAuthor: 'Caroline <caroline@example.com>',
});
await git.commitFile('test.txt', {
fileContent: 'Updated content',
commitMessage: 'Update test.txt',
commitDate: '2020-06-20',
commitAuthor: 'Josh-Cena <josh-cena@example.com>',
});
await fs.writeFile(path.join(repoDir, 'test.txt'), 'Updated content (2)');
await fs.writeFile(path.join(repoDir, 'moved.txt'), 'This file is moved');
await git.addAll();
await git.commit(
'Update test.txt again, create moved.txt',
'2020-09-13',
'Robert <robert@example.com>',
);
await fs.move(
path.join(repoDir, 'moved.txt'),
path.join(repoDir, 'dest.txt'),
);
await git.addAll();
await git.commit(
'Rename moved.txt to dest.txt',
'2020-11-13',
'Seb <seb@example.com>',
);
await fs.writeFile(path.join(repoDir, 'untracked.txt'), "I'm untracked");
return repoDir;
}
// Create the repo only once for all tests => faster tests
const repoDirPromise = createGitRepoTestFixture();
describe('getFileCommitDate', () => {
// Titles were swapped: the `age: 'oldest'` test was named "latest" and the
// `age: 'newest'` test was named "earliest". Fixed so names match behavior.
it('returns earliest commit date with author', async () => {
  const repoDir = await repoDirPromise;
  // age: 'oldest' => the commit that first added the file
  await expect(
    getFileCommitDate(path.join(repoDir, 'test.txt'), {
      age: 'oldest',
      includeAuthor: true,
    }),
  ).resolves.toEqual({
    date: new Date('2020-06-19'),
    timestamp: new Date('2020-06-19').getTime(),
    author: 'Caroline',
  });
  // dest.txt was created as moved.txt on 2020-09-13 then renamed:
  // --follow resolves the rename, so the creation commit is preserved.
  await expect(
    getFileCommitDate(path.join(repoDir, 'dest.txt'), {
      age: 'oldest',
      includeAuthor: true,
    }),
  ).resolves.toEqual({
    date: new Date('2020-09-13'),
    timestamp: new Date('2020-09-13').getTime(),
    author: 'Robert',
  });
});
it('returns latest commit date with author', async () => {
  const repoDir = await repoDirPromise;
  // age: 'newest' => the last commit that edited the file
  await expect(
    getFileCommitDate(path.join(repoDir, 'test.txt'), {
      age: 'newest',
      includeAuthor: true,
    }),
  ).resolves.toEqual({
    date: new Date('2020-09-13'),
    timestamp: new Date('2020-09-13').getTime(),
    author: 'Robert',
  });
  await expect(
    getFileCommitDate(path.join(repoDir, 'dest.txt'), {
      age: 'newest',
      includeAuthor: true,
    }),
  ).resolves.toEqual({
    date: new Date('2020-11-13'),
    timestamp: new Date('2020-11-13').getTime(),
    author: 'Seb',
  });
});
it('throws custom error when file is not tracked', async () => {
const repoDir = await repoDirPromise;
await expect(() =>
getFileCommitDate(path.join(repoDir, 'untracked.txt'), {
age: 'newest',
includeAuthor: true,
}),
).rejects.toThrow(FileNotTrackedError);
});
it('throws when file not found', async () => {
const repoDir = await createGitRepoTestFixture();
await expect(() =>
getFileCommitDate(path.join(repoDir, 'nonexistent.txt'), {
age: 'newest',
includeAuthor: true,
}),
).rejects.toThrow(
/Failed to retrieve git history for ".*nonexistent.txt" because the file does not exist./,
);
});
});
describe('commit info APIs', () => {
it('returns creation info for test.txt', async () => {
const repoDir = await repoDirPromise;
const filePath = path.join(repoDir, 'test.txt');
await expect(getGitCreation(filePath)).resolves.toEqual({
author: 'Caroline',
timestamp: new Date('2020-06-19').getTime(),
});
await expect(getGitLastUpdate(filePath)).resolves.toEqual({
author: 'Robert',
timestamp: new Date('2020-09-13').getTime(),
});
});
it('returns creation info for dest.txt', async () => {
const repoDir = await repoDirPromise;
const filePath = path.join(repoDir, 'dest.txt');
await expect(getGitCreation(filePath)).resolves.toEqual({
author: 'Robert',
timestamp: new Date('2020-09-13').getTime(),
});
await expect(getGitLastUpdate(filePath)).resolves.toEqual({
author: 'Seb',
timestamp: new Date('2020-11-13').getTime(),
});
});
it('returns creation info for untracked.txt', async () => {
const repoDir = await repoDirPromise;
const filePath = path.join(repoDir, 'untracked.txt');
await expect(getGitCreation(filePath)).resolves.toEqual(null);
await expect(getGitLastUpdate(filePath)).resolves.toEqual(null);
});
it('returns creation info for non-existing.txt', async () => {
const repoDir = await repoDirPromise;
const filePath = path.join(repoDir, 'non-existing.txt');
await expect(
getGitCreation(filePath),
).rejects.toThrowErrorMatchingInlineSnapshot(
`"An error occurred when trying to get the last update date"`,
);
await expect(
getGitLastUpdate(filePath),
).rejects.toThrowErrorMatchingInlineSnapshot(
`"An error occurred when trying to get the last update date"`,
);
});
it('returns files info', async () => {
const repoDir = await repoDirPromise;
await expect(getGitRepositoryFilesInfo(repoDir)).resolves
.toMatchInlineSnapshot(`
Map {
"dest.txt" => {
"creation": {
"author": "Seb",
"timestamp": 1605225600000,
},
"lastUpdate": {
"author": "Seb",
"timestamp": 1605225600000,
},
},
"moved.txt" => {
"creation": {
"author": "Robert",
"timestamp": 1599955200000,
},
"lastUpdate": {
"author": "Robert",
"timestamp": 1599955200000,
},
},
"test.txt" => {
"creation": {
"author": "Caroline",
"timestamp": 1592524800000,
},
"lastUpdate": {
"author": "Robert",
"timestamp": 1599955200000,
},
},
}
`);
});
});
});
describe('getGitRepoRoot', () => {
  // Minimal repo with one committed file in a subdirectory, so the root
  // can be resolved from both the repo root and a nested folder.
  async function initTestRepo() {
    const {repoDir, git} = await createGitRepoEmpty();
    await git.commitFile('subDir/test.txt');
    return repoDir;
  }
  // Create the repo only once for all tests => faster tests
  const repoDirPromise = initTestRepo();
  it('returns repoDir for cwd=repoDir', async () => {
    const repoDir = await repoDirPromise;
    const cwd = repoDir;
    await expect(getGitRepoRoot(cwd)).resolves.toEqual(repoDir);
  });
  it('returns repoDir for cwd=repoDir/subDir', async () => {
    const repoDir = await repoDirPromise;
    const cwd = path.join(repoDir, 'subDir');
    await expect(getGitRepoRoot(cwd)).resolves.toEqual(repoDir);
  });
  it('returns Docusaurus repo for cwd=__dirname', async () => {
    // NOTE(review): relies on the checkout directory being named
    // "docusaurus" — would fail in a renamed clone; confirm acceptable.
    const cwd = __dirname;
    await expect(getGitRepoRoot(cwd)).resolves.toMatch(/docusaurus$/);
  });
  it('rejects for cwd=repoDir/doesNotExist', async () => {
    const repoDir = await repoDirPromise;
    const cwd = path.join(repoDir, 'doesNotExist');
    await expect(getGitRepoRoot(cwd)).rejects.toThrow(
      /Couldn't find the git repository root directory/,
    );
  });
});
describe('submodules APIs', () => {
async function initTestRepo() {
const superproject = await createGitRepoEmpty();
await superproject.git.commitFile('README.md');
await superproject.git.commitFile('website/docs/myDoc.md');
const submodule1 = await createGitRepoEmpty();
await submodule1.git.commitFile('file1.txt');
const submodule2 = await createGitRepoEmpty();
await submodule2.git.commitFile('subDir/file2.txt');
await superproject.git.defineSubmodules({
'submodules/submodule1': submodule1.repoDir,
'submodules/submodule2': submodule2.repoDir,
});
return {superproject, submodule1, submodule2};
}
// Create the repo only once for all tests => faster tests
const repoPromise = initTestRepo();
describe('getGitSuperProjectRoot', () => {
it('returns superproject dir for cwd=superproject', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir);
await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
repo.superproject.repoDir,
);
});
it('returns superproject dir for cwd=superproject/submodules', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'submodules');
await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
repo.superproject.repoDir,
);
});
it('returns superproject dir for cwd=superproject/website/docs', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'website/docs');
await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
repo.superproject.repoDir,
);
});
it('returns superproject dir for cwd=submodule1', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'submodules/submodule1');
await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
repo.superproject.repoDir,
);
});
it('returns superproject dir for cwd=submodule2', async () => {
const repo = await initTestRepo();
const cwd = path.join(repo.superproject.repoDir, 'submodules/submodule2');
await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
repo.superproject.repoDir,
);
});
it('returns superproject dir for cwd=submodule2/subDir', async () => {
const repo = await repoPromise;
const cwd = path.join(
repo.superproject.repoDir,
'submodules/submodule2/subDir',
);
await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
repo.superproject.repoDir,
);
});
it('rejects for cwd of untracked dir', async () => {
const cwd = await os.tmpdir();
// Do we really want this to throw?
// Not sure, and Git doesn't help us failsafe and return null...
await expect(getGitSuperProjectRoot(cwd)).rejects
.toThrowErrorMatchingInlineSnapshot(`
"Couldn't find the git superproject root directory
Failure while running \`git rev-parse --show-superproject-working-tree\` from cwd="<TEMP_DIR>"
The command executed throws an error: Command failed with exit code 128: git rev-parse --show-superproject-working-tree
fatal: not a git repository (or any of the parent directories): .git"
`);
});
});
describe('getGitSubmodulePaths', () => {
it('returns submodules for cwd=superproject', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir);
await expect(getGitSubmodulePaths(cwd)).resolves.toEqual([
'submodules/submodule1',
'submodules/submodule2',
]);
});
it('returns submodules for cwd=superproject/website/docs', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'website', 'docs');
await expect(getGitSubmodulePaths(cwd)).resolves.toEqual([
// The returned paths are relative to CWD,
// Not sure if it's the best behavior.
// But you'd rather call this with the superproject root as CWD anyway!
'../../submodules/submodule1',
'../../submodules/submodule2',
]);
});
it('returns [] for cwd=submodules/submodule1', async () => {
const repo = await repoPromise;
const cwd = path.join(
repo.superproject.repoDir,
'submodules',
'submodule1',
);
await expect(getGitSubmodulePaths(cwd)).resolves.toEqual([]);
});
it('returns [] for cwd=submodules/submodule2/subDir', async () => {
const repo = await repoPromise;
const cwd = path.join(
repo.superproject.repoDir,
'submodules',
'submodule2',
'subDir',
);
await expect(getGitSubmodulePaths(cwd)).resolves.toEqual([]);
});
it('rejects for cwd=doesNotExist', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'doesNotExist');
await expect(getGitSubmodulePaths(cwd)).rejects.toThrow(
/Couldn't read the list of git submodules/,
);
});
it('rejects for cwd=notTracked', async () => {
const cwd = await os.tmpdir();
await expect(getGitSubmodulePaths(cwd)).rejects.toThrow(
/Couldn't read the list of git submodules/,
);
});
});
describe('getGitAllRepoRoots', () => {
  // Every cwd inside the superproject resolves to the same list:
  // the superproject root followed by each submodule root.
  // Extracted to a helper to avoid repeating the expected array 5 times.
  async function expectedRoots() {
    const repo = await repoPromise;
    return [
      repo.superproject.repoDir,
      path.join(repo.superproject.repoDir, 'submodules', 'submodule1'),
      path.join(repo.superproject.repoDir, 'submodules', 'submodule2'),
    ];
  }
  it('returns root paths for cwd=superproject', async () => {
    const repo = await repoPromise;
    const cwd = path.join(repo.superproject.repoDir);
    await expect(getGitAllRepoRoots(cwd)).resolves.toEqual(
      await expectedRoots(),
    );
  });
  it('returns root paths for cwd=superproject/website/docs', async () => {
    const repo = await repoPromise;
    const cwd = path.join(repo.superproject.repoDir, 'website', 'docs');
    await expect(getGitAllRepoRoots(cwd)).resolves.toEqual(
      await expectedRoots(),
    );
  });
  it('returns root paths for cwd=superproject/submodules', async () => {
    const repo = await repoPromise;
    const cwd = path.join(repo.superproject.repoDir, 'submodules');
    await expect(getGitAllRepoRoots(cwd)).resolves.toEqual(
      await expectedRoots(),
    );
  });
  it('returns root paths for cwd=superproject/submodules/submodule1', async () => {
    const repo = await repoPromise;
    const cwd = path.join(
      repo.superproject.repoDir,
      'submodules',
      'submodule1',
    );
    await expect(getGitAllRepoRoots(cwd)).resolves.toEqual(
      await expectedRoots(),
    );
  });
  it('returns root paths for cwd=superproject/submodules/submodule2/subDir', async () => {
    const repo = await repoPromise;
    const cwd = path.join(
      repo.superproject.repoDir,
      'submodules',
      'submodule2',
      'subDir',
    );
    await expect(getGitAllRepoRoots(cwd)).resolves.toEqual(
      await expectedRoots(),
    );
  });
  it('rejects for cwd=doesNotExist', async () => {
    const repo = await repoPromise;
    const cwd = path.join(repo.superproject.repoDir, 'doesNotExist');
    await expect(getGitAllRepoRoots(cwd)).rejects.toThrow(
      /Could not get all the git repository root paths/,
    );
  });
  it('rejects for cwd=notTracked', async () => {
    // os.tmpdir() is synchronous: the previous `await` was a no-op
    const cwd = os.tmpdir();
    await expect(getGitAllRepoRoots(cwd)).rejects.toThrow(
      /Could not get all the git repository root paths/,
    );
  });
});
describe('getGitRepositoryFilesInfo', () => {
// Note: each repository only reports files tracked by its OWN history,
// keyed by paths relative to that repository's root
it('for superproject', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir);
await expect(getGitRepositoryFilesInfo(cwd)).resolves
.toMatchInlineSnapshot(`
Map {
"website/docs/myDoc.md" => {
"creation": {
"author": "Seb",
"timestamp": 1592524800000,
},
"lastUpdate": {
"author": "Seb",
"timestamp": 1592524800000,
},
},
"README.md" => {
"creation": {
"author": "Seb",
"timestamp": 1592524800000,
},
"lastUpdate": {
"author": "Seb",
"timestamp": 1592524800000,
},
},
}
`);
});
// Submodule files are NOT visible from the superproject: we must query
// each submodule repository separately
it('for submodule1', async () => {
const repo = await repoPromise;
const cwd = path.join(
repo.superproject.repoDir,
'submodules',
'submodule1',
);
await expect(getGitRepositoryFilesInfo(cwd)).resolves
.toMatchInlineSnapshot(`
Map {
"file1.txt" => {
"creation": {
"author": "Seb",
"timestamp": 1592524800000,
},
"lastUpdate": {
"author": "Seb",
"timestamp": 1592524800000,
},
},
}
`);
});
it('for submodule2', async () => {
const repo = await repoPromise;
const cwd = path.join(
repo.superproject.repoDir,
'submodules',
'submodule2',
);
await expect(getGitRepositoryFilesInfo(cwd)).resolves
.toMatchInlineSnapshot(`
Map {
"subDir/file2.txt" => {
"creation": {
"author": "Seb",
"timestamp": 1592524800000,
},
"lastUpdate": {
"author": "Seb",
"timestamp": 1592524800000,
},
},
}
`);
});
});
});

View File

@ -0,0 +1,524 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import path from 'path';
import fs from 'fs-extra';
import os from 'os';
import _ from 'lodash';
import execa from 'execa';
import PQueue from 'p-queue';
import logger from '@docusaurus/logger';
// Quite high/conservative concurrency value (it was previously "Infinity")
// See https://github.com/facebook/docusaurus/pull/10915
const DefaultGitCommandConcurrency =
// TODO Docusaurus v4: bump node, availableParallelism() now always exists
(typeof os.availableParallelism === 'function'
? os.availableParallelism()
: os.cpus().length) * 4;
// Optional user override, read from the environment at module load time
const GitCommandConcurrencyEnv = process.env.DOCUSAURUS_GIT_COMMAND_CONCURRENCY
? parseInt(process.env.DOCUSAURUS_GIT_COMMAND_CONCURRENCY, 10)
: undefined;
// Only honor the env override when it parses to a strictly positive number
// (NaN fails the > 0 check, so garbage values fall back to the default)
const GitCommandConcurrency =
GitCommandConcurrencyEnv && GitCommandConcurrencyEnv > 0
? GitCommandConcurrencyEnv
: DefaultGitCommandConcurrency;
// We use a queue to avoid running too many concurrent Git commands at once
// See https://github.com/facebook/docusaurus/issues/10348
const GitCommandQueue = new PQueue({
concurrency: GitCommandConcurrency,
});
// Returns true when a usable `git` binary can be executed from PATH
const realHasGitFn = () => {
  try {
    const {exitCode} = execa.sync('git', ['--version']);
    return exitCode === 0;
  } catch (error) {
    // execa.sync throws when the binary cannot be found/executed
    return false;
  }
};
// The hasGit call is synchronous IO so we memoize it
// The user won't install Git in the middle of a build anyway...
// (kept un-memoized under NODE_ENV=test)
const hasGit =
  process.env.NODE_ENV === 'test' ? realHasGitFn : _.memoize(realHasGitFn);
// TODO Docusaurus v4: remove this
// Exceptions are not made for control flow logic
/** Custom error thrown when git is not found in `PATH`. */
// Callers detect this case with instanceof checks (see getGitCommitInfo)
export class GitNotFoundError extends Error {}
// TODO Docusaurus v4: remove this, only kept for retro-compatibility
// Exceptions are not made for control flow logic
/** Custom error thrown when the current file is not tracked by git. */
export class FileNotTrackedError extends Error {}
/**
 * Fetches the git history of a file and returns a relevant commit date.
 * It gets the commit date instead of author date so that amended commits
 * can have their dates updated.
 *
 * @throws {@link GitNotFoundError} If git is not found in `PATH`.
 * @throws {@link FileNotTrackedError} If the current file is not tracked by git.
 * @throws Also throws when `git log` exited with non-zero, or when it outputs
 * unexpected text.
 */
export async function getFileCommitDate(
  /** Absolute path to the file. */
  file: string,
  args: {
    /**
     * `"oldest"` is the commit that added the file, following renames;
     * `"newest"` is the last commit that edited the file.
     */
    age?: 'oldest' | 'newest';
    /** Use `includeAuthor: true` to get the author information as well. */
    includeAuthor?: false;
  },
): Promise<{
  /** Relevant commit date. */
  date: Date; // TODO duplicate data, not really useful?
  /** Timestamp returned from git, converted to **milliseconds**. */
  timestamp: number;
}>;
/**
 * Fetches the git history of a file and returns a relevant commit date.
 * It gets the commit date instead of author date so that amended commits
 * can have their dates updated.
 *
 * @throws {@link GitNotFoundError} If git is not found in `PATH`.
 * @throws {@link FileNotTrackedError} If the current file is not tracked by git.
 * @throws Also throws when `git log` exited with non-zero, or when it outputs
 * unexpected text.
 */
export async function getFileCommitDate(
  /** Absolute path to the file. */
  file: string,
  args: {
    /**
     * `"oldest"` is the commit that added the file, following renames;
     * `"newest"` is the last commit that edited the file.
     */
    age?: 'oldest' | 'newest';
    includeAuthor: true;
  },
): Promise<{
  /** Relevant commit date. */
  date: Date;
  /** Timestamp returned from git, converted to **milliseconds**. */
  timestamp: number;
  /** The author's name, as returned from git. */
  author: string;
}>;
export async function getFileCommitDate(
  file: string,
  {
    age = 'oldest',
    includeAuthor = false,
  }: {
    age?: 'oldest' | 'newest';
    includeAuthor?: boolean;
  },
): Promise<{
  date: Date;
  timestamp: number;
  author?: string;
}> {
  if (!hasGit()) {
    throw new GitNotFoundError(
      `Failed to retrieve git history for "${file}" because git is not installed.`,
    );
  }
  if (!(await fs.pathExists(file))) {
    throw new Error(
      `Failed to retrieve git history for "${file}" because the file does not exist.`,
    );
  }
  // We add a "RESULT:" prefix to make parsing easier
  // See why: https://github.com/facebook/docusaurus/pull/10022
  const resultFormat = includeAuthor ? 'RESULT:%ct,%an' : 'RESULT:%ct';
  // Build the git argv as an array and run WITHOUT a shell.
  // The previous implementation interpolated the basename into a shell
  // command string, which broke (and was unsafe) for file names containing
  // quotes or other shell metacharacters.
  const gitArgs = [
    // Do not include GPG signature in the log output
    // See https://github.com/facebook/docusaurus/pull/10022
    '-c',
    'log.showSignature=false',
    'log',
    `--format=${resultFormat}`,
    '--max-count=1',
    ...(age === 'oldest' ? ['--follow', '--diff-filter=A'] : []),
    '--',
    path.basename(file),
  ];
  const result = (await GitCommandQueue.add(() => {
    return execa('git', gitArgs, {
      cwd: path.dirname(file),
    });
  }))!;
  if (result.exitCode !== 0) {
    throw new Error(
      `Failed to retrieve the git history for file "${file}" with exit code ${result.exitCode}: ${result.stderr}`,
    );
  }
  // We only parse the output line starting with our "RESULT:" prefix
  // See why https://github.com/facebook/docusaurus/pull/10022
  const regex = includeAuthor
    ? /(?:^|\n)RESULT:(?<timestamp>\d+),(?<author>.+)(?:$|\n)/
    : /(?:^|\n)RESULT:(?<timestamp>\d+)(?:$|\n)/;
  const output = result.stdout.trim();
  if (!output) {
    throw new FileNotTrackedError(
      `Failed to retrieve the git history for file "${file}" because the file is not tracked by git.`,
    );
  }
  const match = output.match(regex);
  if (!match) {
    throw new Error(
      `Failed to retrieve the git history for file "${file}" with unexpected output: ${output}`,
    );
  }
  // git prints seconds since epoch; JS Date wants milliseconds
  const timestampInSeconds = Number(match.groups!.timestamp);
  const timestamp = timestampInSeconds * 1_000;
  const date = new Date(timestamp);
  if (includeAuthor) {
    return {date, timestamp, author: match.groups!.author!};
  }
  return {date, timestamp};
}
// Warn-once flags so repeated failures don't spam the build log
let showedGitRequirementError = false;
let showedFileNotTrackedError = false;
type GitCommitInfo = {timestamp: number; author: string};
// Queries Git for the oldest (creation) or newest (last update) commit
// touching filePath; returns null for empty paths and known benign failures
async function getGitCommitInfo(
  filePath: string,
  age: 'oldest' | 'newest',
): Promise<GitCommitInfo | null> {
  if (!filePath) {
    return null;
  }
  // Wrap in try/catch in case the shell commands fail
  // (e.g. project doesn't use Git, etc).
  try {
    const {timestamp, author} = await getFileCommitDate(filePath, {
      age,
      includeAuthor: true,
    });
    return {timestamp, author};
  } catch (err) {
    // TODO legacy perf issue: do not use exceptions for control flow!
    if (err instanceof GitNotFoundError) {
      if (!showedGitRequirementError) {
        logger.warn('Sorry, the last update options require Git.');
        showedGitRequirementError = true;
      }
      return null;
    }
    if (err instanceof FileNotTrackedError) {
      if (!showedFileNotTrackedError) {
        logger.warn(
          'Cannot infer the update date for some files, as they are not tracked by git.',
        );
        showedFileNotTrackedError = true;
      }
      return null;
    }
    // Unexpected failures are wrapped with context and re-thrown
    throw new Error(
      `An error occurred when trying to get the last update date`,
      {cause: err},
    );
  }
}
/** Newest commit info (last update) for the file, or null if unavailable. */
export async function getGitLastUpdate(
filePath: string,
): Promise<GitCommitInfo | null> {
return getGitCommitInfo(filePath, 'newest');
}
/** Oldest commit info (creation) for the file, or null if unavailable. */
export async function getGitCreation(
filePath: string,
): Promise<GitCommitInfo | null> {
return getGitCommitInfo(filePath, 'oldest');
}
/**
 * Resolves the root directory of the Git repository containing cwd,
 * via `git rev-parse --show-toplevel`. Throws with a descriptive
 * message when the command cannot run or exits non-zero.
 */
export async function getGitRepoRoot(cwd: string): Promise<string> {
  const baseErrorMessage = () =>
    `Couldn't find the git repository root directory
Failure while running ${logger.code(
      'git rev-parse --show-toplevel',
    )} from cwd=${logger.path(cwd)}`;
  const result = await execa('git', ['rev-parse', '--show-toplevel'], {
    cwd,
  }).catch((error) => {
    // We enter this rejection when cwd is not a dir for example
    throw new Error(
      `${baseErrorMessage()}
The command executed throws an error: ${error.message}`,
      {cause: error},
    );
  });
  const {exitCode, stdout, stderr} = result;
  if (exitCode !== 0) {
    throw new Error(
      `${baseErrorMessage()}
The command returned exit code ${logger.code(exitCode)}: ${logger.subdue(
        stderr,
      )}`,
    );
  }
  // Normalize to a real path (realpath resolves symlinks)
  return fs.realpath.native(stdout.trim());
}
// A Git "superproject" is a Git repository that contains submodules
// See https://git-scm.com/docs/git-rev-parse#Documentation/git-rev-parse.txt---show-superproject-working-tree
// See https://git-scm.com/book/en/v2/Git-Tools-Submodules
/**
 * Returns the superproject root when cwd is inside a submodule,
 * otherwise falls back to the regular repository root (getGitRepoRoot).
 */
export async function getGitSuperProjectRoot(
cwd: string,
): Promise<string | null> {
const createErrorMessageBase = () => {
return `Couldn't find the git superproject root directory
Failure while running ${logger.code(
'git rev-parse --show-superproject-working-tree',
)} from cwd=${logger.path(cwd)}`;
};
const result = await execa(
'git',
['rev-parse', '--show-superproject-working-tree'],
{
cwd,
},
).catch((error) => {
// We enter this rejection when cwd is not a dir for example
throw new Error(
`${createErrorMessageBase()}
The command executed throws an error: ${error.message}`,
{cause: error},
);
});
if (result.exitCode !== 0) {
throw new Error(
`${createErrorMessageBase()}
The command returned exit code ${logger.code(result.exitCode)}: ${logger.subdue(
result.stderr,
)}`,
);
}
const output = result.stdout.trim();
// this command only works when inside submodules
// otherwise it doesn't return anything when we are inside the main repo
if (output) {
// Normalize to a real path, consistent with getGitRepoRoot
return fs.realpath.native(output);
}
return getGitRepoRoot(cwd);
}
// See https://git-scm.com/book/en/v2/Git-Tools-Submodules
/**
 * Lists submodule paths (relative to the repo root) of the repository at cwd,
 * via `git submodule status`. Returns [] when there are no submodules.
 * Throws with a descriptive message when the command cannot run, exits
 * non-zero, or emits an unparsable line.
 */
export async function getGitSubmodulePaths(cwd: string): Promise<string[]> {
  const createErrorMessageBase = () => {
    return `Couldn't read the list of git submodules
Failure while running ${logger.code(
      'git submodule status',
    )} from cwd=${logger.path(cwd)}`;
  };
  const result = await execa('git', ['submodule', 'status'], {
    cwd,
  }).catch((error) => {
    // We enter this rejection when cwd is not a dir for example
    throw new Error(
      `${createErrorMessageBase()}
The command executed throws an error: ${error.message}`,
      {cause: error},
    );
  });
  if (result.exitCode !== 0) {
    throw new Error(
      `${createErrorMessageBase()}
The command returned exit code ${logger.code(result.exitCode)}: ${logger.subdue(
        result.stderr,
      )}`,
    );
  }
  const output = result.stdout.trim();
  if (!output) {
    return [];
  }
  /* The output may contain a space/-/+/U prefix, for example
  1234567e3e35d1f5b submodules/foo (heads/main)
  -9ab1f1d3a2d77b0a4 submodules/bar (heads/dev)
  +f00ba42e1b3ddead submodules/baz (remotes/origin/main)
  Udeadbeefcafe1234 submodules/qux
  */
  // Parsing is pure string manipulation: no need for an async mapper
  // wrapped in Promise.all like the previous implementation
  const getSubmodulePath = (line: string): string => {
    const submodulePath = line.substring(1).split(' ')[1];
    if (!submodulePath) {
      throw new Error(`Failed to parse git submodule line: ${line}`);
    }
    return submodulePath;
  };
  return output.split('\n').map(getSubmodulePath);
}
// Find the root git repository alongside all its submodules, if any
export async function getGitAllRepoRoots(cwd: string): Promise<string[]> {
  try {
    const superProjectRoot = await getGitSuperProjectRoot(cwd);
    if (!superProjectRoot) {
      return [];
    }
    // Submodule paths are relative to the superproject root:
    // anchor them there and normalize to real paths
    const toAbsoluteRealPath = (submodulePath: string) =>
      fs.realpath.native(path.resolve(superProjectRoot, submodulePath));
    const submodulePaths = await getGitSubmodulePaths(superProjectRoot);
    const submoduleRoots = await Promise.all(
      submodulePaths.map(toAbsoluteRealPath),
    );
    return [superProjectRoot, ...submoduleRoots];
  } catch (error) {
    throw new Error(
      `Could not get all the git repository root paths (superproject + submodules) from cwd=${cwd}`,
      {cause: error},
    );
  }
}
// Useful information about a file tracked in a Git repository
export type GitFileInfo = {
// Oldest commit info seen for this file (smallest timestamp wins)
creation: GitCommitInfo;
// Newest commit info seen for this file (largest timestamp wins)
lastUpdate: GitCommitInfo;
};
// A map of all the files tracked in a Git repository
// Keys are file paths: relative to the repo root when produced by
// getGitRepositoryFilesInfo, absolute after post-processing by callers
export type GitFileInfoMap = Map<string, GitFileInfo>;
// Logic inspired from Astro Starlight:
// See https://bsky.app/profile/bluwy.me/post/3lyihod6qos2a
// See https://github.com/withastro/starlight/blob/c417f1efd463be63b7230617d72b120caed098cd/packages/starlight/utils/git.ts#L58
/**
 * Reads the whole `git log` of the repository at cwd in a single command
 * and builds a map of file path (relative to the repo root) to
 * creation/lastUpdate commit info.
 */
export async function getGitRepositoryFilesInfo(
  cwd: string,
): Promise<GitFileInfoMap> {
  // git --no-pager -c log.showSignature=false log --format=t:%ct,a:%an --name-status
  const result = await execa(
    'git',
    [
      '--no-pager',
      // Do not include GPG signature in the log output
      // See https://github.com/facebook/docusaurus/pull/10022
      '-c',
      'log.showSignature=false',
      // The git command we want to run
      'log',
      // Format each history entry as t:<seconds since epoch>,a:<author name>
      '--format=t:%ct,a:%an',
      // In each entry include the name and status for each modified file
      '--name-status',
      // For creation info, should we use --follow --find-renames=100% ???
    ],
    {
      cwd,
      encoding: 'utf-8',
      // TODO use streaming to avoid a large buffer
      // See https://github.com/withastro/starlight/issues/3154
      maxBuffer: 20 * 1024 * 1024,
    },
  );
  if (result.exitCode !== 0) {
    throw new Error(
      `Docusaurus failed to run the 'git log' to retrieve tracked files last update date/author.
The command exited with code ${result.exitCode}: ${result.stderr}`,
    );
  }
  const logLines = result.stdout.split('\n');
  const now = Date.now();
  // TODO not fail-fast
  let runningDate = now;
  let runningAuthor = 'N/A';
  const runningMap: GitFileInfoMap = new Map();
  // Matches commit header lines: t:<timestamp>,a:<author name>
  // Previous implementation split on "," which truncated author names that
  // themselves contain commas (e.g. "Doe, John"); this regex keeps the
  // whole author string after the first ",a:" marker.
  // Hoisted out of the loop so the regex is compiled once.
  const commitHeaderRegex = /^t:(?<timestamp>\d+),a:(?<author>.*)$/;
  for (const logLine of logLines) {
    const headerMatch = logLine.match(commitHeaderRegex);
    if (headerMatch) {
      // git prints seconds since epoch; we store milliseconds
      runningDate = Number.parseInt(headerMatch.groups!.timestamp!, 10) * 1000;
      runningAuthor = headerMatch.groups!.author!;
      // A header line is never a file line: skip the file parsing below
      // (previously a tab inside an author name could corrupt the parsing)
      continue;
    }
    // TODO the code below doesn't handle delete/move/rename operations properly
    // it returns files that no longer exist in the repo (deleted/moved)
    // - Added files take the format `A\t<file>`
    // - Modified files take the format `M\t<file>`
    // - Deleted files take the format `D\t<file>`
    // - Renamed files take the format `R<count>\t<old>\t<new>`
    // - Copied files take the format `C<count>\t<old>\t<new>`
    // The name of the file as of the commit being processed is always
    // the last part of the log line.
    const tabSplit = logLine.lastIndexOf('\t');
    if (tabSplit === -1) {
      continue;
    }
    const relativeFile = logLine.slice(tabSplit + 1);
    const currentFileInfo = runningMap.get(relativeFile);
    // Creation = the smallest commit timestamp seen for this file so far
    const currentCreationTime = currentFileInfo?.creation.timestamp || now;
    const newCreationTime = Math.min(currentCreationTime, runningDate);
    const newCreation: GitCommitInfo =
      !currentFileInfo || newCreationTime !== currentCreationTime
        ? {timestamp: newCreationTime, author: runningAuthor}
        : currentFileInfo.creation;
    // Last update = the largest commit timestamp seen for this file so far
    const currentLastUpdateTime = currentFileInfo?.lastUpdate.timestamp || 0;
    const newLastUpdateTime = Math.max(currentLastUpdateTime, runningDate);
    const newLastUpdate: GitCommitInfo =
      !currentFileInfo || newLastUpdateTime !== currentLastUpdateTime
        ? {timestamp: newLastUpdateTime, author: runningAuthor}
        : currentFileInfo.lastUpdate;
    runningMap.set(relativeFile, {
      creation: newCreation,
      lastUpdate: newLastUpdate,
    });
  }
  return runningMap;
}

View File

@ -0,0 +1,54 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {
VCS_HARDCODED_CREATION_INFO,
VCS_HARDCODED_LAST_UPDATE_INFO,
VCS_HARDCODED_UNTRACKED_FILE_PATH,
VcsHardcoded,
} from './vcsHardcoded';
import {VcsGitAdHoc} from './vcsGitAdHoc';
import {VscGitEager} from './vcsGitEager';
import {VcsDisabled} from './vcsDisabled';
import {VcsDefaultV1} from './vcsDefaultV1';
import {VcsDefaultV2} from './vcsDefaultV2';
import type {VcsConfig, VcsPreset} from '@docusaurus/types';
// Registry of all built-in VCS presets, keyed by their public preset name
const VcsPresets: Record<VcsPreset, VcsConfig> = {
'git-ad-hoc': VcsGitAdHoc,
'git-eager': VscGitEager,
hardcoded: VcsHardcoded,
disabled: VcsDisabled,
'default-v1': VcsDefaultV1,
'default-v2': VcsDefaultV2,
};
// All valid preset names, derived from the registry keys
export const VcsPresetNames = Object.keys(VcsPresets) as VcsPreset[];
/**
 * Looks up a built-in VCS preset by name.
 * @returns the preset config, or undefined when the name is unknown.
 */
export function findVcsPreset(presetName: string): VcsConfig | undefined {
  return VcsPresets[presetName as VcsPreset];
}
/**
 * Looks up a built-in VCS preset by name.
 * @throws when the preset name is unknown.
 */
export function getVcsPreset(presetName: VcsPreset): VcsConfig {
  const vcs = findVcsPreset(presetName);
  if (vcs) {
    return vcs;
  }
  // Bug fix: report the presetName argument we failed to resolve,
  // not process.env.DOCUSAURUS_VCS (which may be unset or unrelated here)
  throw new Error(`Unknown Docusaurus VCS preset name: ${presetName}`);
}
// Convenient export for writing unit tests depending on VCS
// Exposes the hardcoded preset methods plus the exact constants they
// return, so tests can assert against the same values
export const TEST_VCS = {
CREATION_INFO: VCS_HARDCODED_CREATION_INFO,
LAST_UPDATE_INFO: VCS_HARDCODED_LAST_UPDATE_INFO,
UNTRACKED_FILE_PATH: VCS_HARDCODED_UNTRACKED_FILE_PATH,
...VcsHardcoded,
};

View File

@ -0,0 +1,33 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {VcsHardcoded} from './vcsHardcoded';
import {VcsGitAdHoc} from './vcsGitAdHoc';
import type {VcsConfig} from '@docusaurus/types';
// Re-evaluated on every call: dev/test use hardcoded values,
// production uses the ad-hoc Git strategy
function getDynamicStrategy(): VcsConfig {
  const isDevOrTest =
    process.env.NODE_ENV === 'development' || process.env.NODE_ENV === 'test';
  return isDevOrTest ? VcsHardcoded : VcsGitAdHoc;
}
/**
 * This VCS implements the historical Git automatic strategy.
 * It is only enabled in production mode, using ad-hoc git log commands.
 */
export const VcsDefaultV1: VcsConfig = {
  initialize: (...params) => getDynamicStrategy().initialize(...params),
  getFileCreationInfo: (...params) =>
    getDynamicStrategy().getFileCreationInfo(...params),
  getFileLastUpdateInfo: (...params) =>
    getDynamicStrategy().getFileLastUpdateInfo(...params),
};

View File

@ -0,0 +1,33 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {VcsHardcoded} from './vcsHardcoded';
import {VscGitEager} from './vcsGitEager';
import type {VcsConfig} from '@docusaurus/types';
// Re-evaluated on every call: dev/test use hardcoded values,
// production uses the eager Git strategy
function getStrategy(): VcsConfig {
  const nodeEnv = process.env.NODE_ENV;
  if (nodeEnv === 'development' || nodeEnv === 'test') {
    return VcsHardcoded;
  }
  return VscGitEager;
}
/**
 * This VCS implements the new eager Git automatic strategy.
 * It is only enabled in production mode, reading the git repository eagerly.
 */
export const VcsDefaultV2: VcsConfig = {
  initialize: (...params) => getStrategy().initialize(...params),
  getFileCreationInfo: (...params) =>
    getStrategy().getFileCreationInfo(...params),
  getFileLastUpdateInfo: (...params) =>
    getStrategy().getFileLastUpdateInfo(...params),
};

View File

@ -0,0 +1,25 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import type {VcsConfig} from '@docusaurus/types';
/**
 * This VCS implementation always returns null values.
 * Useful to fully opt out of VCS info reading.
 */
export const VcsDisabled: VcsConfig = {
  initialize: () => {
    // Noop
  },
  getFileCreationInfo: async (_filePath) => {
    return null;
  },
  // Fix typo: the parameter was previously named `_ilePath`
  getFileLastUpdateInfo: async (_filePath) => {
    return null;
  },
};

View File

@ -0,0 +1,30 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {getGitLastUpdate, getGitCreation} from './gitUtils';
import type {VcsConfig} from '@docusaurus/types';
/**
 * A VCS strategy to query Git information in an ad-hoc way.
 * This is the default/historical Docusaurus Git VCS implementation.
 * Unfortunately, it is a major bottleneck for large sites/repositories.
 *
 * See also https://github.com/facebook/docusaurus/issues/11208
 */
export const VcsGitAdHoc: VcsConfig = {
  initialize: () => {
    // Nothing to do here for the default/historical Git implementation
  },
  // Each call spawns git commands on demand (no caching at this layer)
  getFileCreationInfo: async (filePath: string) => getGitCreation(filePath),
  getFileLastUpdateInfo: async (filePath: string) => getGitLastUpdate(filePath),
};

View File

@ -0,0 +1,99 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {resolve, basename} from 'node:path';
import logger, {PerfLogger} from '@docusaurus/logger';
import {getGitAllRepoRoots, getGitRepositoryFilesInfo} from './gitUtils';
import type {GitFileInfo, GitFileInfoMap} from './gitUtils';
import type {VcsConfig} from '@docusaurus/types';
// The Map keys should be absolute file paths, not relative Git paths
function resolveFileInfoMapPaths(
  repoRoot: string,
  filesInfo: GitFileInfoMap,
): GitFileInfoMap {
  const resolved: GitFileInfoMap = new Map();
  // Git emits paths relative to the repo root: anchor them there
  for (const [relativePath, fileInfo] of filesInfo) {
    resolved.set(resolve(repoRoot, relativePath), fileInfo);
  }
  return resolved;
}
// Merges maps into one; later maps win on key collisions
function mergeFileMaps(fileMaps: GitFileInfoMap[]): GitFileInfoMap {
  const merged: GitFileInfoMap = new Map();
  for (const fileMap of fileMaps) {
    for (const [filePath, fileInfo] of fileMap) {
      merged.set(filePath, fileInfo);
    }
  }
  return merged;
}
// Reads the Git history of the superproject and all of its submodules,
// and merges everything into a single map keyed by absolute file paths
async function loadAllGitFilesInfoMap(cwd: string): Promise<GitFileInfoMap> {
const roots = await PerfLogger.async('Reading Git root dirs', () =>
getGitAllRepoRoots(cwd),
);
// Each repository's history is read concurrently
const allMaps: GitFileInfoMap[] = await Promise.all(
roots.map(async (root) => {
const map = await PerfLogger.async(
`Reading Git history for repo ${logger.path(basename(root))}`,
() => getGitRepositoryFilesInfo(root),
);
// Keys come back relative to each repo root: make them absolute
return resolveFileInfoMapPaths(root, map);
}),
);
return mergeFileMaps(allMaps);
}
// Builds the eager Git VCS: initialize() kicks off ONE read of the whole
// Git history, then per-file queries are answered from the in-memory map
function createGitVcsConfig(): VcsConfig {
// Memoized across initialize() calls; null until first initialization
let filesMapPromise: Promise<GitFileInfoMap> | null = null;
// Returns null when not initialized or when the file is unknown
// NOTE(review): if initialization failed, this await re-throws that error
async function getGitFileInfo(filePath: string): Promise<GitFileInfo | null> {
const filesMap = await filesMapPromise;
return filesMap?.get(filePath) ?? null;
}
return {
initialize: ({siteDir}) => {
if (filesMapPromise) {
// We only initialize this VCS once!
// For i18n sites, this permits reading ahead of time for all locales
// so that it only slows down the first locale
// I assume this logic is fine, but we'll see if it causes trouble
// Note: we could also only call "initialize()" once from the outside,
// But maybe it could be useful for custom VCS implementations to be
// able to initialize once per locale?
PerfLogger.log(
'Git Eager VCS strategy already initialized, skipping re-initialization',
);
return;
}
filesMapPromise = PerfLogger.async('Git Eager VCS init', () =>
loadAllGitFilesInfoMap(siteDir),
);
// Attach a handler to avoid an unhandled-rejection crash; later awaits
// of filesMapPromise will still observe the rejection
filesMapPromise.catch((error) => {
console.error(
'Failed to initialize the Docusaurus Git Eager VCS strategy',
error,
);
});
},
getFileCreationInfo: async (filePath: string) => {
const fileInfo = await getGitFileInfo(filePath);
return fileInfo?.creation ?? null;
},
getFileLastUpdateInfo: async (filePath: string) => {
const fileInfo = await getGitFileInfo(filePath);
return fileInfo?.lastUpdate ?? null;
},
};
}
// NOTE(review): "VscGitEager" looks like a typo of "VcsGitEager"; kept as-is
// because other modules import this exact name
export const VscGitEager: VcsConfig = createGitVcsConfig();

View File

@ -0,0 +1,45 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import type {VcsConfig, VcsChangeInfo} from '@docusaurus/types';
export const VCS_HARDCODED_CREATION_INFO: VcsChangeInfo = {
  timestamp: 1490997600000, // 1st Apr 2017
  author: 'Creator',
};
export const VCS_HARDCODED_LAST_UPDATE_INFO: VcsChangeInfo = {
  timestamp: 1539502055000, // 14th Oct 2018
  author: 'Author',
};
// Random path: will not collide with any real file path under test
export const VCS_HARDCODED_UNTRACKED_FILE_PATH = `file/path/${Math.random()}.mdx`;
/**
 * This VCS implementation always returns hardcoded values for testing purposes.
 * It is also useful in dev environments where VCS info is not important.
 * Reading information from the VCS can be slow and is not always necessary.
 */
export const VcsHardcoded: VcsConfig = {
  initialize: () => {
    // Noop
  },
  // The sentinel "untracked" path simulates a file without VCS history
  getFileCreationInfo: async (filePath: string) =>
    filePath === VCS_HARDCODED_UNTRACKED_FILE_PATH
      ? null
      : VCS_HARDCODED_CREATION_INFO,
  getFileLastUpdateInfo: async (filePath: string) =>
    filePath === VCS_HARDCODED_UNTRACKED_FILE_PATH
      ? null
      : VCS_HARDCODED_LAST_UPDATE_INFO,
};

View File

@ -51,7 +51,7 @@
"escape-html": "^1.0.3",
"eta": "^2.2.0",
"eval": "^0.1.8",
"execa": "5.1.1",
"execa": "^5.1.1",
"fs-extra": "^11.1.1",
"html-tags": "^3.3.1",
"html-webpack-plugin": "^5.6.0",

View File

@ -28,7 +28,7 @@ describe('isInternalUrl', () => {
expect(isInternalUrl('https://foo.com')).toBeFalsy();
});
it('returns false for whatever protocol links', () => {
it('returns false for relative protocol links', () => {
expect(isInternalUrl('//foo.com')).toBeFalsy();
});
@ -43,4 +43,50 @@ describe('isInternalUrl', () => {
it('returns false for undefined links', () => {
expect(isInternalUrl(undefined)).toBeFalsy();
});
describe('custom scheme links', () => {
it('returns true for invalid protocol schemes', () => {
expect(isInternalUrl('+customScheme://')).toBeTruthy();
expect(isInternalUrl('+customScheme://whatever')).toBeTruthy();
expect(isInternalUrl('+customScheme:whatever')).toBeTruthy();
expect(isInternalUrl('.customScheme://')).toBeTruthy();
expect(isInternalUrl('.customScheme://whatever')).toBeTruthy();
expect(isInternalUrl('.customScheme:whatever')).toBeTruthy();
expect(isInternalUrl('-customScheme://')).toBeTruthy();
expect(isInternalUrl('-customScheme://whatever')).toBeTruthy();
expect(isInternalUrl('-customScheme:whatever')).toBeTruthy();
expect(isInternalUrl('custom_scheme://')).toBeTruthy();
expect(isInternalUrl('custom_scheme://whatever')).toBeTruthy();
expect(isInternalUrl('custom_scheme:whatever')).toBeTruthy();
expect(isInternalUrl('custom scheme://')).toBeTruthy();
expect(isInternalUrl('custom scheme://whatever')).toBeTruthy();
expect(isInternalUrl('custom scheme:whatever')).toBeTruthy();
expect(isInternalUrl('custom$scheme://')).toBeTruthy();
expect(isInternalUrl('custom$scheme://whatever')).toBeTruthy();
expect(isInternalUrl('custom$scheme:whatever')).toBeTruthy();
});
it('returns false for valid protocol schemes', () => {
expect(isInternalUrl('customScheme://')).toBeFalsy();
expect(isInternalUrl('customScheme://whatever')).toBeFalsy();
expect(isInternalUrl('customScheme:whatever')).toBeFalsy();
expect(isInternalUrl('custom-scheme://')).toBeFalsy();
expect(isInternalUrl('custom-scheme://whatever')).toBeFalsy();
expect(isInternalUrl('custom-scheme:whatever')).toBeFalsy();
expect(isInternalUrl('custom.scheme://')).toBeFalsy();
expect(isInternalUrl('custom.scheme://whatever')).toBeFalsy();
expect(isInternalUrl('custom.scheme:whatever')).toBeFalsy();
expect(isInternalUrl('custom-sch.eme+-.://')).toBeFalsy();
expect(isInternalUrl('custom-sch.eme+-.://whatever')).toBeFalsy();
expect(isInternalUrl('custom-sch.eme+-.:whatever')).toBeFalsy();
});
});
});

View File

@ -5,8 +5,11 @@
* LICENSE file in the root directory of this source tree.
*/
// Poor man's protocol detection
// Spec: https://datatracker.ietf.org/doc/html/rfc3986#section-3.1
// In particular: scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
export function hasProtocol(url: string): boolean {
  // Diff residue fix: the chunk contained both the old (`\w*:`) and new
  // returns; only the RFC 3986-compliant scheme regex is kept.
  // Also treats protocol-relative URLs ("//host/path") as having a protocol.
  return /^(?:[A-Za-z][A-Za-z\d+.-]*:|\/\/)/.test(url);
}
export default function isInternalUrl(url?: string): boolean {

View File

@ -8,10 +8,10 @@
import fs from 'fs-extra';
import logger, {PerfLogger} from '@docusaurus/logger';
import {mapAsyncSequential} from '@docusaurus/utils';
import {loadContext, type LoadContextParams} from '../../server/site';
import {loadI18n} from '../../server/i18n';
import {type LoadContextParams} from '../../server/site';
import {getLocaleList} from '../../server/i18n';
import {buildLocale, type BuildLocaleParams} from './buildLocale';
import {isAutomaticBaseUrlLocalizationDisabled} from './buildUtils';
import {loadSiteConfig} from '../../server/config';
export type BuildCLIOptions = Pick<LoadContextParams, 'config' | 'outDir'> & {
locale?: [string, ...string[]];
@ -81,27 +81,21 @@ async function getLocalesToBuild({
siteDir: string;
cliOptions: BuildCLIOptions;
}): Promise<[string, ...string[]]> {
// TODO we shouldn't need to load all context + i18n just to get that list
// only loading siteConfig should be enough
const context = await loadContext({
const {siteConfig} = await loadSiteConfig({
siteDir,
outDir: cliOptions.outDir,
config: cliOptions.config,
automaticBaseUrlLocalizationDisabled: isAutomaticBaseUrlLocalizationDisabled(cliOptions),
customConfigFilePath: cliOptions.config,
});
const i18n = await loadI18n({
siteDir,
config: context.siteConfig,
currentLocale: context.siteConfig.i18n.defaultLocale, // Awkward but ok
automaticBaseUrlLocalizationDisabled: false,
});
const locales = cliOptions.locale ?? i18n.locales;
const locales =
cliOptions.locale ??
getLocaleList({
i18nConfig: siteConfig.i18n,
currentLocale: siteConfig.i18n.defaultLocale, // Awkward but ok
});
return orderLocales({
locales: locales as [string, ...string[]],
defaultLocale: i18n.defaultLocale,
defaultLocale: siteConfig.i18n.defaultLocale,
});
}

Some files were not shown because too many files have changed in this diff Show More