Mirror of https://github.com/facebook/docusaurus.git
Synced 2025-12-26 09:43:10 +00:00

Compare commits (4 commits)
Commits in this comparison:

- abfbe5621b
- 83f5ec9678
- 865365f0a2
- 0d82de27de
@@ -27,10 +27,10 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository code
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Use Node.js
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -22,9 +22,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Node
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -32,5 +32,3 @@ jobs:
        run: yarn || yarn || yarn
      - name: Build blog-only
        run: yarn workspace website build:blogOnly
-       env:
-         DOCUSAURUS_PERF_LOGGER: 'true'
@@ -25,9 +25,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Node
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -37,27 +37,26 @@ jobs:
      - name: Build Hash Router
        run: yarn build:website:fast
        env:
          DOCUSAURUS_PERF_LOGGER: 'true'
          DOCUSAURUS_ROUTER: 'hash'
          # Note: hash router + baseUrl do not play well together
          # This would host at https://facebook.github.io/docusaurus/#/docusaurus/
          # BASE_URL: '/docusaurus/' # hash router +

      - name: Upload Website artifact
-       uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+       uses: actions/upload-artifact@v4
        with:
          name: website-hash-router-archive
          path: website/build

      #- name: Upload Website Pages artifact
-     #  uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4.0.0
+     #  uses: actions/upload-pages-artifact@v3
      #  with:
      #    path: website/build

      # Deploy to https://facebook.github.io/docusaurus/
      - name: Deploy to GitHub Pages
        if: ${{ github.event_name != 'pull_request' && github.ref_name == 'main' }}
-       uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
+       uses: peaceiris/actions-gh-pages@v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: website/build
@@ -81,4 +80,4 @@ jobs:
  #   steps:
  #     - name: Deploy to GitHub Pages
  #       id: deployment
- #       uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4.0.5
+ #       uses: actions/deploy-pages@v4
@@ -41,14 +41,14 @@ jobs:
        DOCUSAURUS_INFRA: ['SLOWER', 'FASTER']
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Node
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
      - name: Track build size changes
-       uses: preactjs/compressed-size-action@8518045ed95e94e971b83333085e1cb99aa18aa8 # v2.9.0
+       uses: preactjs/compressed-size-action@946a292cd35bd1088e0d7eb92b69d1a8d5b5d76a # v2
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          build-script: build:website:fast
@@ -62,7 +62,6 @@ jobs:
          comment-key: DOCUSAURUS_INFRA_${{ matrix.DOCUSAURUS_INFRA }}
        env:
          DOCUSAURUS_SLOWER: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 'true' || 'false' }}
-         DOCUSAURUS_PERF_LOGGER: 'true'

  # Ensures build times stay under reasonable thresholds
  build-time:
@@ -74,9 +73,9 @@ jobs:
        DOCUSAURUS_INFRA: ['SLOWER', 'FASTER']
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Node
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -89,7 +88,6 @@ jobs:
        timeout-minutes: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 3 || 2 }}
        env:
          DOCUSAURUS_SLOWER: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 'true' || 'false' }}
-         DOCUSAURUS_PERF_LOGGER: 'true'

      # Ensure build with a warm cache does not increase too much
      - name: Build (warm cache)
@@ -98,6 +96,5 @@ jobs:
        timeout-minutes: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 1 || 2 }}
        env:
          DOCUSAURUS_SLOWER: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 'true' || 'false' }}
-         DOCUSAURUS_PERF_LOGGER: 'true'

  # TODO post a GitHub comment with build perf warnings?
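A side note on the `${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 3 || 2 }}` expressions above: workflow expressions have no ternary operator, so the `cond && a || b` idiom stands in for one, and it silently misbehaves when the middle operand is falsy. A minimal TypeScript sketch of the same idiom (the values are illustrative, not taken from the workflow):

// `cond && a || b` is evaluated as `(cond && a) || b`.
// It behaves like `cond ? a : b` only while `a` is truthy.
const slower = true;

const timeoutOk = (slower && 3) || 2; // 3, as intended
const timeoutBad = (slower && 0) || 2; // 2, NOT 0: the falsy middle operand falls through

console.log(timeoutOk, timeoutBad);

The workflow is safe here because 3, 1, and 2 are all truthy, but the pattern is worth knowing when editing these timeouts.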
@@ -20,11 +20,11 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          fetch-depth: 0 # Needed to get the commit number with "git rev-list --count HEAD"
      - name: Set up Node
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -33,12 +33,12 @@ jobs:

    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Initialize CodeQL
-       uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # 4.31.0
+       uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # 4.30.8
        with:
          languages: ${{ matrix.language }}

      - name: Perform CodeQL Analysis
-       uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # 4.31.0
+       uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # 4.30.8
@@ -18,10 +18,10 @@ jobs:

    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Set up Node
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -13,6 +13,6 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Dependency Review
-       uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # 4.8.2
+       uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # 4.8.1
@@ -21,10 +21,10 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Use Node.js
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -19,7 +19,7 @@ jobs:
      contents: write

    steps:
-     - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+     - uses: actions/checkout@v5
        with:
          repository: ${{ github.event.pull_request.head.repo.full_name }}
          ref: ${{ github.head_ref }}
@@ -42,6 +42,6 @@ jobs:
      - name: Print Diff
        run: git diff

-     - uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
+     - uses: stefanzweifel/git-auto-commit-action@v7
        with:
          commit_message: 'refactor: apply lint autofix'
@@ -20,9 +20,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Node
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -22,9 +22,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Node
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -38,12 +38,12 @@ jobs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
-       node: ['20.0', '20', '22', '24', '25.1']
+       node: ['20.0', '20', '22', '24']
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Use Node.js ${{ matrix.node }}
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: ${{ matrix.node }}
          cache: yarn
@@ -78,9 +78,9 @@ jobs:
    runs-on: windows-8-core
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Use Node.js LTS
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -109,7 +109,7 @@ jobs:
          DOCUSAURUS_PERF_LOGGER: 'true'
        working-directory: test-website-in-workspace
      - name: Upload Website artifact
-       uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+       uses: actions/upload-artifact@v4
        with:
          name: website-e2e-windows
          path: test-website-in-workspace/build
@@ -124,9 +124,9 @@ jobs:
        variant: [-s, -st]
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Use Node.js LTS
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -193,9 +193,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Use Node.js LTS
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -233,9 +233,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Use Node.js LTS
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -26,9 +26,9 @@ jobs:
        variant: ['js', 'ts']
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Node LTS
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: lts/*
          cache: yarn
@@ -27,14 +27,14 @@ jobs:
    runs-on: windows-latest
    strategy:
      matrix:
-       node: ['20.0', '20', '22', '24', '25.1']
+       node: ['20.0', '20', '22', '24']
    steps:
      - name: Support longpaths
        run: git config --system core.longpaths true
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Use Node.js ${{ matrix.node }}
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: ${{ matrix.node }}
          cache: yarn
@@ -54,8 +54,6 @@ jobs:
        run: yarn workspace website test:swizzle:wrap:ts
      - name: Docusaurus Build
        run: yarn build:website:fast
-       env:
-         DOCUSAURUS_PERF_LOGGER: 'true'

      - name: TypeCheck website
        # see https://github.com/facebook/docusaurus/pull/10486
@@ -27,12 +27,12 @@ jobs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
-       node: ['20.0', '20', '22', '24', '25.1']
+       node: ['20.0', '20', '22', '24']
    steps:
      - name: Checkout
-       uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Use Node.js ${{ matrix.node }}
-       uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+       uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: ${{ matrix.node }}
          cache: yarn
@@ -1,7 +1,7 @@
 dist
 node_modules
 .yarn
-**/build/**
+build
 coverage
 .docusaurus
 .idea
@@ -11,8 +11,6 @@ coverage

jest/vendor

-argos/test-results
-
packages/lqip-loader/lib/
packages/docusaurus/lib/
packages/docusaurus-*/lib/*
@@ -45,7 +45,7 @@ Short on time? Check out our [5-minute tutorial ⏱️](https://tutorial.docusau

- **Customizable**

-  > While Docusaurus ships with the key pages and sections you need to get started, including a home page, a docs section, a [blog](https://docusaurus.io/docs/blog), and additional support pages, it is also [customizable](https://docusaurus.io/docs/creating-pages) to ensure you have a site that is [uniquely yours](https://docusaurus.io/docs/styling-layout).
+  > While Docusaurus ships with the key pages and sections you need to get started, including a home page, a docs section, a [blog](https://docusaurus.io/docs/blog), and additional support pages, it is also [customizable](https://docusaurus.io/docs/creating-pages) as well to ensure you have a site that is [uniquely yours](https://docusaurus.io/docs/styling-layout).

  ## Installation
@@ -16,8 +16,8 @@
    "dev": "docusaurus start"
  },
  "dependencies": {
-   "@docusaurus/core": "3.9.2",
-   "@docusaurus/preset-classic": "3.9.2",
+   "@docusaurus/core": "3.9.0",
+   "@docusaurus/preset-classic": "3.9.0",
    "@mdx-js/react": "^3.0.0",
    "clsx": "^2.0.0",
    "prism-react-renderer": "^2.3.0",
@@ -25,9 +25,9 @@
    "react-dom": "^19.0.0"
  },
  "devDependencies": {
-   "@docusaurus/module-type-aliases": "3.9.2",
-   "@docusaurus/tsconfig": "3.9.2",
-   "@docusaurus/types": "3.9.2",
+   "@docusaurus/module-type-aliases": "3.9.0",
+   "@docusaurus/tsconfig": "3.9.0",
+   "@docusaurus/types": "3.9.0",
    "typescript": "~5.6.2"
  },
  "browserslist": {
(File diff suppressed because it is too large.)
@@ -15,8 +15,8 @@
    "dev": "docusaurus start"
  },
  "dependencies": {
-   "@docusaurus/core": "3.9.2",
-   "@docusaurus/preset-classic": "3.9.2",
+   "@docusaurus/core": "3.9.0",
+   "@docusaurus/preset-classic": "3.9.0",
    "@mdx-js/react": "^3.0.0",
    "clsx": "^2.0.0",
    "prism-react-renderer": "^2.3.0",
@@ -24,8 +24,8 @@
    "react-dom": "^19.0.0"
  },
  "devDependencies": {
-   "@docusaurus/module-type-aliases": "3.9.2",
-   "@docusaurus/types": "3.9.2"
+   "@docusaurus/module-type-aliases": "3.9.0",
+   "@docusaurus/types": "3.9.0"
  },
  "browserslist": {
    "production": [
(File diff suppressed because it is too large.)
@@ -12,3 +12,8 @@ declare module 'to-vfile' {

  export function read(path: string, encoding?: string): Promise<VFile>;
}
+
+declare module '@testing-utils/git' {
+  const createTempRepo: typeof import('./utils/git').createTempRepo;
+  export {createTempRepo};
+}
@@ -82,7 +82,7 @@ function normalizePaths<T>(value: T): T {
-   (val) => val.split(cwdReal).join('<PROJECT_ROOT>'),
+   (val) => val.split(cwd).join('<PROJECT_ROOT>'),

-   // Replace temp directory with <TEMP_DIR>
-   (val) => val.split(tempDirReal).join('<TEMP_DIR>'),
+   // Replace home directory with <TEMP_DIR>
+   (val) => val.split(tempDir).join('<TEMP_DIR>'),
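This helper is the kind of thing that gets registered as a Jest snapshot serializer so machine-specific paths never leak into snapshots. A minimal self-contained sketch of the idea; the function below is illustrative and not the diffed implementation:

import os from 'os';

// Replace machine-specific path prefixes with stable placeholder tokens,
// so snapshots are reproducible across machines and CI runners.
function normalizePathsInString(value: string): string {
  return value
    .split(process.cwd())
    .join('<PROJECT_ROOT>')
    .split(os.tmpdir())
    .join('<TEMP_DIR>');
}

console.log(normalizePathsInString(`${process.cwd()}/website/build`));
// -> "<PROJECT_ROOT>/website/build"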
@@ -0,0 +1,63 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import fs from 'fs-extra';
import os from 'os';
import path from 'path';
import shell from 'shelljs';

class Git {
  constructor(private dir: string) {
    const res = shell.exec('git init', {cwd: dir, silent: true});
    if (res.code !== 0) {
      throw new Error(`git init exited with code ${res.code}.
stderr: ${res.stderr}
stdout: ${res.stdout}`);
    }
    // Doesn't matter currently
    shell.exec('git config user.email "test@jc-verse.com"', {
      cwd: dir,
      silent: true,
    });
    shell.exec('git config user.name "Test"', {cwd: dir, silent: true});

    shell.exec('git commit --allow-empty -m "First commit"', {
      cwd: dir,
      silent: true,
    });
  }
  commit(msg: string, date: string, author: string): void {
    const addRes = shell.exec('git add .', {cwd: this.dir, silent: true});
    const commitRes = shell.exec(
      `git commit -m "${msg}" --date "${date}T00:00:00Z" --author "${author}"`,
      {
        cwd: this.dir,
        env: {GIT_COMMITTER_DATE: `${date}T00:00:00Z`},
        silent: true,
      },
    );
    if (addRes.code !== 0) {
      throw new Error(`git add exited with code ${addRes.code}.
stderr: ${addRes.stderr}
stdout: ${addRes.stdout}`);
    }
    if (commitRes.code !== 0) {
      throw new Error(`git commit exited with code ${commitRes.code}.
stderr: ${commitRes.stderr}
stdout: ${commitRes.stdout}`);
    }
  }
}

// This function is sync so the same mock repo can be shared across tests
export function createTempRepo(): {repoDir: string; git: Git} {
  const repoDir = fs.mkdtempSync(path.join(os.tmpdir(), 'git-test-repo'));

  const git = new Git(repoDir);

  return {repoDir, git};
}
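A quick usage sketch for this helper, assuming the `@testing-utils/git` alias declared earlier; the file name and commit arguments are illustrative:

import {createTempRepo} from '@testing-utils/git';
import fs from 'fs-extra';
import path from 'path';

// Create an isolated throwaway repo, add a file, and commit it with a
// deterministic date/author so git-based metadata is stable in tests.
const {repoDir, git} = createTempRepo();
fs.writeFileSync(path.join(repoDir, 'post.md'), '# Hello');
git.commit('add post', '2021-01-01', 'Test <test@example.com>');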
@@ -25,7 +25,7 @@
    "@docusaurus/logger": "3.9.2",
    "@docusaurus/utils": "3.9.2",
    "commander": "^5.1.0",
-   "execa": "^5.1.1",
+   "execa": "5.1.1",
    "fs-extra": "^11.1.1",
    "lodash": "^4.17.21",
    "prompts": "^2.4.2",
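The `execa` change above swaps a caret range for an exact pin. A caret range accepts any compatible release within the same major version, while an exact version accepts only itself; a short sketch using the `semver` package (versions are illustrative):

import semver from 'semver';

// "^5.1.1" matches any 5.x.y >= 5.1.1; "5.1.1" matches only 5.1.1.
console.log(semver.satisfies('5.2.0', '^5.1.1')); // true
console.log(semver.satisfies('5.2.0', '5.1.1')); // false
console.log(semver.satisfies('6.0.0', '^5.1.1')); // false (major bump excluded)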
@@ -273,10 +273,7 @@ async function getSiteName(
        return 'A website name is required.';
      }
      const dest = path.resolve(rootDir, siteName);
-     if (siteName === '.' && (await fs.readdir(dest)).length > 0) {
-       return logger.interpolate`Directory not empty at path=${dest}!`;
-     }
-     if (siteName !== '.' && (await fs.pathExists(dest))) {
+     if (await fs.pathExists(dest)) {
        return logger.interpolate`Directory already exists at path=${dest}!`;
      }
      return true;
@@ -36,6 +36,7 @@
    "@babel/preset-react": "^7.25.9",
    "@babel/preset-typescript": "^7.25.9",
    "@babel/runtime": "^7.25.9",
+   "@babel/runtime-corejs3": "^7.25.9",
    "@babel/traverse": "^7.25.9",
    "@docusaurus/logger": "3.9.2",
    "@docusaurus/utils": "3.9.2",
@@ -28,7 +28,7 @@ async function createSwcJsLoaderFactory(): Promise<
  return ({isServer}) => {
    return {
      loader,
-     options: getOptions({isServer, bundlerName: 'webpack'}),
+     options: getOptions({isServer}),
    };
  };
}

@@ -42,7 +42,7 @@ async function createRspackSwcJsLoaderFactory(): Promise<
  return ({isServer}) => {
    return {
      loader,
-     options: getOptions({isServer, bundlerName: 'rspack'}),
+     options: getOptions({isServer}),
    };
  };
}
@@ -142,10 +142,7 @@ async function getRspackMinimizers({
}: MinimizersConfig): Promise<WebpackPluginInstance[]> {
  const rspack = getCurrentBundlerAsRspack({currentBundler});
  const getBrowserslistQueries = await importGetBrowserslistQueries();
- const browserslistQueries = getBrowserslistQueries({
-   isServer: false,
-   bundlerName: 'rspack',
- });
+ const browserslistQueries = getBrowserslistQueries({isServer: false});
  const swcJsMinimizerOptions = await importSwcJsMinimizerOptions();
  return [
    // See https://rspack.dev/plugins/rspack/swc-js-minimizer-rspack-plugin
@@ -24,7 +24,6 @@
    "@swc/html": "^1.13.5",
    "browserslist": "^4.24.2",
    "lightningcss": "^1.27.0",
-   "semver": "^7.5.4",
    "swc-loader": "^0.2.6",
    "tslib": "^2.6.0",
    "webpack": "^5.95.0"
@@ -9,22 +9,18 @@ import Rspack from '@rspack/core';
 import * as lightningcss from 'lightningcss';
 import browserslist from 'browserslist';
 import {minify as swcHtmlMinifier} from '@swc/html';
-import semver from 'semver';
 import type {JsMinifyOptions, Options as SwcOptions} from '@swc/core';
-import type {CurrentBundler} from '@docusaurus/types';

 export const swcLoader = require.resolve('swc-loader');

 export const getSwcLoaderOptions = ({
   isServer,
-  bundlerName,
 }: {
   isServer: boolean;
-  bundlerName: CurrentBundler['name'];
 }): SwcOptions => {
   return {
     env: {
-      targets: getBrowserslistQueries({isServer, bundlerName}),
+      targets: getBrowserslistQueries({isServer}),
     },
     jsc: {
       parser: {
@@ -67,53 +63,20 @@ export function getSwcJsMinimizerOptions(): JsMinifyOptions {
  };
}

-// TODO this is not accurate
-// for Rspack we should read from the built-in browserslist data
-// see https://github.com/facebook/docusaurus/pull/11496
-function getLastBrowserslistKnownNodeVersion(
-  bundlerName: CurrentBundler['name'],
-): string {
-  if (bundlerName === 'rspack') {
-    // TODO hardcoded value until Rspack exposes its Browserslist data
-    // see https://github.com/facebook/docusaurus/pull/11496
-    return '22.0.0';
-  }
-  // browserslist('last 1 node versions')[0]!.replace('node ', '')
-  return browserslist.nodeVersions.at(-1)!;
-}
-
-function getMinVersion(v1: string, v2: string): string {
-  return semver.lt(v1, v2) ? v1 : v2;
-}
-
 // We need this because of Rspack built-in LightningCSS integration
 // See https://github.com/orgs/browserslist/discussions/846
 export function getBrowserslistQueries({
   isServer,
-  bundlerName,
 }: {
   isServer: boolean;
-  bundlerName: CurrentBundler['name'];
 }): string[] {
   if (isServer) {
     // Escape hatch env variable
     if (process.env.DOCUSAURUS_SERVER_NODE_TARGET) {
       return [`node ${process.env.DOCUSAURUS_SERVER_NODE_TARGET}`];
     }
-    // For server builds, we want to use the current Node version as target
-    // But we can't pass a target that Browserslist doesn't know about yet
-    const nodeTarget = getMinVersion(
-      process.versions.node,
-      getLastBrowserslistKnownNodeVersion(bundlerName),
-    );
-
-    return [`node ${nodeTarget}`];
+    return [`node ${process.versions.node}`];
   }

   const queries = browserslist.loadConfig({path: process.cwd()}) ?? [
     ...browserslist.defaults,
   ];

   return queries;
 }
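For context on the block removed above: clamping the server target protects against passing a Node version that Browserslist does not know about yet, which would otherwise make query resolution fail on a bleeding-edge runtime. A hedged sketch of that clamping logic in isolation (version strings are illustrative):

import semver from 'semver';

// The removed helper clamped the server target to the newest Node version
// Browserslist knows about, mirroring getMinVersion above.
function clampNodeTarget(current: string, lastKnown: string): string {
  return semver.lt(current, lastKnown) ? current : lastKnown;
}

console.log(clampNodeTarget('25.1.0', '24.0.0')); // "24.0.0"
console.log(clampNodeTarget('22.9.0', '24.0.0')); // "22.9.0"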
@@ -6,6 +6,7 @@
 */

import path from 'path';
+import url from 'url';
import fs from 'fs-extra';
import {
  toMessageRelativeFilePath,

@@ -14,7 +15,6 @@ import {
  findAsyncSequential,
  getFileLoaderUtils,
  parseURLOrPath,
  parseLocalURLPath,
} from '@docusaurus/utils';
import escapeHtml from 'escape-html';
import {imageSizeFromFile} from 'image-size/fromFile';
@@ -207,11 +207,11 @@ async function processImageNode(target: Target, context: Context) {
    return;
  }

- const localUrlPath = parseLocalURLPath(node.url);
- if (!localUrlPath) {
-   // pathname:// is an escape hatch, in case the user does not want images to
+ const parsedUrl = url.parse(node.url);
+ if (parsedUrl.protocol || !parsedUrl.pathname) {
+   // pathname:// is an escape hatch, in case user does not want her images to
    // be converted to require calls going through webpack loader
-   if (parseURLOrPath(node.url).protocol === 'pathname:') {
+   if (parsedUrl.protocol === 'pathname:') {
      node.url = node.url.replace('pathname://', '');
    }
    return;

@@ -220,7 +220,7 @@ async function processImageNode(target: Target, context: Context) {
  // We decode it first because Node Url.pathname is always encoded
  // while the image file-system path are not.
  // See https://github.com/facebook/docusaurus/discussions/10720
- const decodedPathname = decodeURIComponent(localUrlPath.pathname);
+ const decodedPathname = decodeURIComponent(parsedUrl.pathname);

  // We try to convert image urls without protocol to images with require calls
  // going through webpack ensures that image assets exist at build time
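Both sides of this hunk honor the `pathname://` escape hatch, which keeps a URL as a plain path instead of converting it into a webpack `require()` call. A minimal sketch of that unwrapping; the helper below is illustrative, not the plugin's actual parser:

// `pathname://` marks a URL that should be kept as a plain path instead of
// being turned into a webpack require() call.
function stripPathnameProtocol(rawUrl: string): string | null {
  // A URL with any other protocol (https:, data:, ...) is left to other code.
  const protocolMatch = rawUrl.match(/^([a-z][a-z0-9+.-]*):/i);
  if (protocolMatch && protocolMatch[1].toLowerCase() !== 'pathname') {
    return null;
  }
  return rawUrl.replace('pathname://', '');
}

console.log(stripPathnameProtocol('pathname:///img/logo.png')); // "/img/logo.png"
console.log(stripPathnameProtocol('https://example.com/x.png')); // null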
@@ -6,6 +6,7 @@
 */

import path from 'path';
+import url from 'url';
import fs from 'fs-extra';
import {
  toMessageRelativeFilePath,

@@ -14,7 +15,6 @@ import {
  findAsyncSequential,
  getFileLoaderUtils,
  parseURLOrPath,
  parseLocalURLPath,
} from '@docusaurus/utils';
import escapeHtml from 'escape-html';
import logger from '@docusaurus/logger';
@@ -209,22 +209,21 @@ async function processLinkNode(target: Target, context: Context) {
    return;
  }

- const localUrlPath = parseLocalURLPath(node.url);
- if (!localUrlPath) {
+ const parsedUrl = url.parse(node.url);
+ if (parsedUrl.protocol || !parsedUrl.pathname) {
    // Don't process pathname:// here, it's used by the <Link> component
    return;
  }

- const hasSiteAlias = localUrlPath.pathname.startsWith('@site/');
+ const hasSiteAlias = parsedUrl.pathname.startsWith('@site/');
  const hasAssetLikeExtension =
-   path.extname(localUrlPath.pathname) &&
-   !localUrlPath.pathname.match(/\.(?:mdx?|html)(?:#|$)/);
+   path.extname(parsedUrl.pathname) &&
+   !parsedUrl.pathname.match(/\.(?:mdx?|html)(?:#|$)/);
  if (!hasSiteAlias && !hasAssetLikeExtension) {
    return;
  }

  const localFilePath = await getLocalFileAbsolutePath(
-   decodeURIComponent(localUrlPath.pathname),
+   decodeURIComponent(parsedUrl.pathname),
    context,
  );
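The asset-link predicate in this hunk combines two checks: the pathname has a file extension, and that extension is not a page extension (`.md`, `.mdx`, `.html`, optionally followed by a hash). A small sketch of the same predicate in isolation (inputs are illustrative):

import path from 'path';

// A link is treated as an asset when it has a file extension that is not a
// page extension (.md / .mdx / .html), optionally followed by a #hash.
function looksLikeAsset(pathname: string): boolean {
  return Boolean(
    path.extname(pathname) && !pathname.match(/\.(?:mdx?|html)(?:#|$)/),
  );
}

console.log(looksLikeAsset('/files/report.pdf')); // true
console.log(looksLikeAsset('/docs/intro.md')); // false
console.log(looksLikeAsset('/docs/intro')); // false (no extension)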
@@ -1,637 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
[Deleted snapshot file, 637 lines: the `buildAllRoutes works for realistic blog post 2` and `buildAllRoutes works for realistic blog post 3` snapshots, covering @theme/BlogPostPage routes for /blog/post1 through /blog/post6, paginated @theme/BlogListPage routes (/blog, /blog/page/2, /blog/page/3), the @theme/BlogArchivePage route, the @theme/Blog/Pages/BlogAuthorsListPage route, @theme/Blog/Pages/BlogAuthorsPostsPage routes for author1 (two pages) and author3, and the generated data files (blog-post-list-prop-default.json, blogMetadata-default.json, site-blog-post-*.json).]
@@ -24,7 +24,24 @@ exports[`getContentTranslationFiles returns translation files matching snapshot

exports[`translateContent falls back when translation is incomplete 1`] = `
{
  "blogDescription": "Someone's random blog",
+ "blogListPaginated": [
+   {
+     "items": [
+       "hello",
+     ],
+     "metadata": {
+       "blogDescription": "Someone's random blog",
+       "blogTitle": "My blog",
+       "nextPage": undefined,
+       "page": 1,
+       "permalink": "/",
+       "postsPerPage": 10,
+       "previousPage": undefined,
+       "totalCount": 1,
+       "totalPages": 1,
+     },
+   },
+ ],
  "blogPosts": [
    {
      "content": "",
@@ -46,13 +63,29 @@
  "blogSidebarTitle": "All my posts",
  "blogTags": {},
  "blogTagsListPath": "/tags",
  "blogTitle": "My blog",
}
`;

exports[`translateContent returns translated loaded 1`] = `
{
  "blogDescription": "Someone's random blog (translated)",
+ "blogListPaginated": [
+   {
+     "items": [
+       "hello",
+     ],
+     "metadata": {
+       "blogDescription": "Someone's random blog (translated)",
+       "blogTitle": "My blog (translated)",
+       "nextPage": undefined,
+       "page": 1,
+       "permalink": "/",
+       "postsPerPage": 10,
+       "previousPage": undefined,
+       "totalCount": 1,
+       "totalPages": 1,
+     },
+   },
+ ],
  "blogPosts": [
    {
      "content": "",
@@ -74,6 +107,5 @@
  "blogSidebarTitle": "All my posts (translated)",
  "blogTags": {},
  "blogTagsListPath": "/tags",
  "blogTitle": "My blog (translated)",
}
`;
@@ -8,10 +8,7 @@
import {jest} from '@jest/globals';
import path from 'path';
import fs from 'fs-extra';
-import {
-  DEFAULT_PARSE_FRONT_MATTER,
-  DEFAULT_VCS_CONFIG,
-} from '@docusaurus/utils';
+import {DEFAULT_PARSE_FRONT_MATTER} from '@docusaurus/utils';
import {fromPartial} from '@total-typescript/shoehorn';
import {normalizePluginOptions} from '@docusaurus/utils-validation';
import tree from 'tree-node-cli';
@@ -54,7 +51,7 @@ function getBlogContentPaths(siteDir: string): BlogContentPaths {
}

async function testGenerateFeeds(
- contextInput: LoadContext,
+ context: LoadContext,
  optionsInput: Options,
): Promise<void> {
  const options = validateOptions({
@@ -65,17 +62,6 @@ async function testGenerateFeeds(
    options: optionsInput,
  });

- const context: LoadContext = {
-   ...contextInput,
-   siteConfig: {
-     ...contextInput.siteConfig,
-     future: {
-       ...contextInput.siteConfig?.future,
-       experimental_vcs: DEFAULT_VCS_CONFIG,
-     },
-   },
- };
-
  const contentPaths = getBlogContentPaths(context.siteDir);
  const authorsMap = await getAuthorsMap({
    contentPaths,
@@ -8,7 +8,12 @@
import {jest} from '@jest/globals';
import * as path from 'path';
import {normalizePluginOptions} from '@docusaurus/utils-validation';
-import {posixPath, getLocaleConfig, TEST_VCS} from '@docusaurus/utils';
+import {
+  posixPath,
+  getFileCommitDate,
+  LAST_UPDATE_FALLBACK,
+  getLocaleConfig,
+} from '@docusaurus/utils';
import {DEFAULT_FUTURE_CONFIG} from '@docusaurus/core/src/server/configValidation';
import pluginContentBlog from '../index';
import {validateOptions} from '../options';
@@ -27,10 +32,6 @@ import type {
  EditUrlFunction,
} from '@docusaurus/plugin-content-blog';

-async function getFileCreationDate(filePath: string): Promise<Date> {
-  return new Date((await TEST_VCS.getFileCreationInfo(filePath)).timestamp);
-}
-
const markdown: MarkdownConfig = {
  format: 'mdx',
  mermaid: true,
@@ -560,7 +561,9 @@ describe('blog plugin', () => {
  const blogPosts = await getBlogPosts(siteDir);
  const noDateSource = path.posix.join('@site', PluginPath, 'no date.md');
  const noDateSourceFile = path.posix.join(siteDir, PluginPath, 'no date.md');
- const noDateSourceTime = await getFileCreationDate(noDateSourceFile);
+ // We know the file exists and we know we have git
+ const result = await getFileCommitDate(noDateSourceFile, {age: 'oldest'});
+ const noDateSourceTime = result.date;

  expect({
    ...getByTitle(blogPosts, 'no date').metadata,
@@ -638,7 +641,10 @@ describe('blog plugin', () => {
    },
    DefaultI18N,
  );
- const {blogPosts, blogTags} = (await plugin.loadContent!())!;
+ const {blogPosts, blogTags, blogListPaginated} =
+   (await plugin.loadContent!())!;
+
+ expect(blogListPaginated).toHaveLength(3);

  expect(Object.keys(blogTags)).toHaveLength(2);
  expect(blogTags).toMatchSnapshot();
@@ -668,23 +674,29 @@ describe('last update', () => {
  );
  const {blogPosts} = (await plugin.loadContent!())!;

- const TestLastUpdate = await TEST_VCS.getFileLastUpdateInfo('any path');
-
  expect(blogPosts[0]?.metadata.lastUpdatedBy).toBe('seb');
  expect(blogPosts[0]?.metadata.lastUpdatedAt).toBe(
-   lastUpdateFor('2021-01-01'),
+   LAST_UPDATE_FALLBACK.lastUpdatedAt,
  );

- expect(blogPosts[1]?.metadata.lastUpdatedBy).toBe(TestLastUpdate.author);
+ expect(blogPosts[1]?.metadata.lastUpdatedBy).toBe(
+   LAST_UPDATE_FALLBACK.lastUpdatedBy,
+ );
  expect(blogPosts[1]?.metadata.lastUpdatedAt).toBe(
-   lastUpdateFor('2021-01-01'),
+   LAST_UPDATE_FALLBACK.lastUpdatedAt,
  );

  expect(blogPosts[2]?.metadata.lastUpdatedBy).toBe('seb');
- expect(blogPosts[2]?.metadata.lastUpdatedAt).toBe(TestLastUpdate.timestamp);
+ expect(blogPosts[2]?.metadata.lastUpdatedAt).toBe(
+   lastUpdateFor('2021-01-01'),
+ );

- expect(blogPosts[3]?.metadata.lastUpdatedBy).toBe(TestLastUpdate.author);
- expect(blogPosts[3]?.metadata.lastUpdatedAt).toBe(TestLastUpdate.timestamp);
+ expect(blogPosts[3]?.metadata.lastUpdatedBy).toBe(
+   LAST_UPDATE_FALLBACK.lastUpdatedBy,
+ );
+ expect(blogPosts[3]?.metadata.lastUpdatedAt).toBe(
+   lastUpdateFor('2021-01-01'),
+ );
});

it('time only', async () => {
@@ -698,27 +710,29 @@ describe('last update', () => {
  );
  const {blogPosts} = (await plugin.loadContent!())!;

- const TestLastUpdate = await TEST_VCS.getFileLastUpdateInfo('any path');
-
- expect(blogPosts[0]?.metadata.title).toBe('Both');
+ expect(blogPosts[0]?.metadata.title).toBe('Author');
  expect(blogPosts[0]?.metadata.lastUpdatedBy).toBeUndefined();
  expect(blogPosts[0]?.metadata.lastUpdatedAt).toBe(
-   lastUpdateFor('2021-01-01'),
+   LAST_UPDATE_FALLBACK.lastUpdatedAt,
  );

- expect(blogPosts[1]?.metadata.title).toBe('Last update date');
+ expect(blogPosts[1]?.metadata.title).toBe('Nothing');
  expect(blogPosts[1]?.metadata.lastUpdatedBy).toBeUndefined();
  expect(blogPosts[1]?.metadata.lastUpdatedAt).toBe(
    LAST_UPDATE_FALLBACK.lastUpdatedAt,
  );

+ expect(blogPosts[2]?.metadata.title).toBe('Both');
+ expect(blogPosts[2]?.metadata.lastUpdatedBy).toBeUndefined();
+ expect(blogPosts[2]?.metadata.lastUpdatedAt).toBe(
+   lastUpdateFor('2021-01-01'),
+ );

- expect(blogPosts[2]?.metadata.title).toBe('Author');
- expect(blogPosts[2]?.metadata.lastUpdatedBy).toBeUndefined();
- expect(blogPosts[2]?.metadata.lastUpdatedAt).toBe(TestLastUpdate.timestamp);

- expect(blogPosts[3]?.metadata.title).toBe('Nothing');
+ expect(blogPosts[3]?.metadata.title).toBe('Last update date');
  expect(blogPosts[3]?.metadata.lastUpdatedBy).toBeUndefined();
- expect(blogPosts[3]?.metadata.lastUpdatedAt).toBe(TestLastUpdate.timestamp);
+ expect(blogPosts[3]?.metadata.lastUpdatedAt).toBe(
+   lastUpdateFor('2021-01-01'),
+ );
});

it('author only', async () => {
@@ -732,18 +746,20 @@ describe('last update', () => {
  );
  const {blogPosts} = (await plugin.loadContent!())!;

- const TestLastUpdate = await TEST_VCS.getFileLastUpdateInfo('any path');
-
  expect(blogPosts[0]?.metadata.lastUpdatedBy).toBe('seb');
  expect(blogPosts[0]?.metadata.lastUpdatedAt).toBeUndefined();

- expect(blogPosts[1]?.metadata.lastUpdatedBy).toBe(TestLastUpdate.author);
+ expect(blogPosts[1]?.metadata.lastUpdatedBy).toBe(
+   LAST_UPDATE_FALLBACK.lastUpdatedBy,
+ );
  expect(blogPosts[1]?.metadata.lastUpdatedAt).toBeUndefined();

  expect(blogPosts[2]?.metadata.lastUpdatedBy).toBe('seb');
  expect(blogPosts[2]?.metadata.lastUpdatedAt).toBeUndefined();

- expect(blogPosts[3]?.metadata.lastUpdatedBy).toBe(TestLastUpdate.author);
+ expect(blogPosts[3]?.metadata.lastUpdatedBy).toBe(
+   LAST_UPDATE_FALLBACK.lastUpdatedBy,
+ );
  expect(blogPosts[3]?.metadata.lastUpdatedAt).toBeUndefined();
});

@ -1,324 +0,0 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/

import * as _ from 'lodash';
import {fromPartial} from '@total-typescript/shoehorn';
import {buildAllRoutes} from '../routes';
import {DEFAULT_OPTIONS} from '../options';
import type {PartialDeep} from '@total-typescript/shoehorn';
import type {BlogPost, BlogPostMetadata} from '@docusaurus/plugin-content-blog';

type Params = Parameters<typeof buildAllRoutes>[0];

async function testBuildAllRoutes(overrides: PartialDeep<Params> = {}) {
const createData = jest.fn(
async (name: string, _data: unknown) => `/data/${name}`,
);

const params: Params = fromPartial<Params>({
baseUrl: '/',
aliasedSource: (str: string) => `@aliased${str}`,
...overrides,

content: {
blogTitle: 'Blog Title',
blogDescription: 'Blog Description',
blogSidebarTitle: 'Blog Sidebar Title',
authorsMap: {},
blogTagsListPath: '',
blogTags: {},
blogPosts: [],
...overrides?.content,
},
options: {
...DEFAULT_OPTIONS,
...overrides?.options,
},
actions: {
createData,
...overrides?.actions,
},
});

const routes = await buildAllRoutes(params);

const data = Object.fromEntries(
createData.mock.calls.map((call) => [call[0], call[1]]),
);

function getRouteByPath(path: string) {
const route = routes.find((r) => r.path === path);
if (!route) {
throw new Error(`Route not found for path: ${path}`);
}
return route;
}

function getRoutesByComponent(component: string) {
return routes.filter((r) => r.component === component);
}

return {routes, data, utils: {getRouteByPath, getRoutesByComponent}};
}

function blogPost(overrides: PartialDeep<BlogPost> = {}): BlogPost {
const id = overrides.id ?? 'blog-post';
return fromPartial<BlogPost>({
id,
content: `Content for ${id}`,
...overrides,
metadata: fromPartial<BlogPostMetadata>({
title: `Title for ${id}`,
description: `Description for ${id}`,
permalink: `/blog/${id}`,
source: `@site/blog/${id}.md`,
date: new Date('2020-01-01'),
tags: [],
readingTime: 2,
authors: [],
frontMatter: {
...overrides?.metadata?.frontMatter,
},
...overrides?.metadata,
}),
});
}

describe('buildAllRoutes', () => {
it('works for empty blog', async () => {
const {routes, data} = await testBuildAllRoutes({
content: {
blogPosts: [],
},
});

expect(routes).toMatchInlineSnapshot(`
[
{
"component": "@theme/BlogListPage",
"exact": true,
"modules": {
"items": [],
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog",
"props": {
"metadata": {
"blogDescription": "Blog Description",
"blogTitle": "Blog Title",
"nextPage": undefined,
"page": 1,
"permalink": "/blog",
"postsPerPage": 10,
"previousPage": undefined,
"totalCount": 0,
"totalPages": 1,
},
},
},
]
`);
expect(data).toMatchInlineSnapshot(`
{
"blog-post-list-prop-default.json": {
"items": [],
"title": "Blog Sidebar Title",
},
"blogMetadata-default.json": {
"authorsListPath": "/blog/authors",
"blogBasePath": "/blog",
"blogTitle": "Blog Title",
},
}
`);
});

it('works for single blog post', async () => {
const {routes, data} = await testBuildAllRoutes({
content: {
blogPosts: [blogPost()],
},
});

expect(routes).toMatchInlineSnapshot(`
[
{
"component": "@theme/BlogPostPage",
"context": {
"blogMetadata": "@aliased/data/blogMetadata-default.json",
},
"exact": true,
"metadata": {
"lastUpdatedAt": undefined,
"sourceFilePath": "blog/blog-post.md",
},
"modules": {
"content": "@site/blog/blog-post.md",
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog/blog-post",
},
{
"component": "@theme/BlogListPage",
"exact": true,
"modules": {
"items": [
{
"content": {
"__import": true,
"path": "@site/blog/blog-post.md",
"query": {
"truncated": true,
},
},
},
],
"sidebar": "@aliased/data/blog-post-list-prop-default.json",
},
"path": "/blog",
"props": {
"metadata": {
"blogDescription": "Blog Description",
"blogTitle": "Blog Title",
"nextPage": undefined,
"page": 1,
"permalink": "/blog",
"postsPerPage": 10,
"previousPage": undefined,
"totalCount": 1,
"totalPages": 1,
},
},
},
{
"component": "@theme/BlogArchivePage",
"exact": true,
"path": "/blog/archive",
"props": {
"archive": {
"blogPosts": [
{
"content": "Content for blog-post",
"id": "blog-post",
"metadata": {
"authors": [],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for blog-post",
"frontMatter": {},
"permalink": "/blog/blog-post",
"readingTime": 2,
"source": "@site/blog/blog-post.md",
"tags": [],
"title": "Title for blog-post",
},
},
],
},
},
},
]
`);
expect(data).toMatchInlineSnapshot(`
{
"blog-post-list-prop-default.json": {
"items": [
{
"date": 2020-01-01T00:00:00.000Z,
"permalink": "/blog/blog-post",
"title": "Title for blog-post",
"unlisted": undefined,
},
],
"title": "Blog Sidebar Title",
},
"blogMetadata-default.json": {
"authorsListPath": "/blog/authors",
"blogBasePath": "/blog",
"blogTitle": "Blog Title",
},
"site-blog-blog-post-md-0d7.json": {
"authors": [],
"date": 2020-01-01T00:00:00.000Z,
"description": "Description for blog-post",
"frontMatter": {},
"permalink": "/blog/blog-post",
"readingTime": 2,
"source": "@site/blog/blog-post.md",
"tags": [],
"title": "Title for blog-post",
},
}
`);
});

it('works for realistic blog post', async () => {
const {routes, data} = await testBuildAllRoutes({
options: {
postsPerPage: 2,
},
content: {
blogTitle: 'Custom blog title',
blogDescription: 'Custom blog description',
blogSidebarTitle: 'Custom blog sidebar title',

blogPosts: [
blogPost({id: 'post1', metadata: {authors: [{key: 'author1'}]}}),
blogPost({id: 'post2', metadata: {authors: [{key: 'author1'}]}}),
blogPost({
id: 'post3',
metadata: {
authors: [{key: 'author3'}],
unlisted: true,
},
}),
blogPost({
id: 'post4',
metadata: {
authors: [{key: 'author1'}, {key: 'author2'}],
},
}),
blogPost({
id: 'post5',
metadata: {authors: [{key: 'author2'}, {key: 'author3'}]},
}),
blogPost({id: 'post6'}),
],

authorsMap: {
author1: {
key: 'author1',
name: 'Author 1',
page: {permalink: '/blog/authors/author1'},
},
author2: {
key: 'author2',
name: 'Author 2',
page: null,
},
author3: {
key: 'author3',
name: 'Author 3',
page: {permalink: '/blog/authors/author3'},
},
},
},
});

expect(_.countBy(routes, 'component')).toMatchInlineSnapshot(`
{
"@theme/Blog/Pages/BlogAuthorsListPage": 1,
"@theme/Blog/Pages/BlogAuthorsPostsPage": 3,
"@theme/BlogArchivePage": 1,
"@theme/BlogListPage": 3,
"@theme/BlogPostPage": 6,
}
`);

expect(routes).toMatchSnapshot();
expect(data).toMatchSnapshot();
});
});

@ -6,7 +6,6 @@
*/

import {updateTranslationFileMessages} from '@docusaurus/utils';
import {fromPartial} from '@total-typescript/shoehorn';
import {getTranslationFiles, translateContent} from '../translations';
import {DEFAULT_OPTIONS} from '../options';
import type {

@ -17,13 +16,13 @@ import type {

const sampleBlogOptions: PluginOptions = {
...DEFAULT_OPTIONS,
blogSidebarTitle: 'All my posts',
blogTitle: 'My blog',
blogDescription: "Someone's random blog",
blogSidebarTitle: 'All my posts',
};

const sampleBlogPosts: BlogPost[] = [
fromPartial({
{
id: 'hello',
metadata: {
permalink: '/blog/2021/06/19/hello',

@ -38,13 +37,27 @@ const sampleBlogPosts: BlogPost[] = [
unlisted: false,
},
content: '',
}),
},
];

const sampleBlogContent: BlogContent = {
blogTitle: sampleBlogOptions.blogTitle,
blogDescription: sampleBlogOptions.blogDescription,
blogSidebarTitle: sampleBlogOptions.blogSidebarTitle,
blogListPaginated: [
{
items: ['hello'],
metadata: {
permalink: '/',
page: 1,
postsPerPage: 10,
totalPages: 1,
totalCount: 1,
previousPage: undefined,
nextPage: undefined,
blogTitle: sampleBlogOptions.blogTitle,
blogDescription: sampleBlogOptions.blogDescription,
},
},
],
blogPosts: sampleBlogPosts,
blogTags: {},
blogTagsListPath: '/tags',

@ -19,6 +19,7 @@ import {
Globby,
groupTaggedItems,
getTagVisibility,
getFileCommitDate,
getContentPathList,
isUnlisted,
isDraft,

@ -224,7 +225,6 @@ async function processBlogSourceFile(
siteConfig: {
baseUrl,
markdown: {parseFrontMatter},
future: {experimental_vcs: vcs},
},
siteDir,
i18n,

@ -257,7 +257,6 @@ async function processBlogSourceFile(
blogSourceAbsolute,
options,
frontMatter.last_update,
vcs,
);

const draft = isDraft({frontMatter});

@ -286,11 +285,17 @@ async function processBlogSourceFile(
return parsedBlogFileName.date;
}

const result = await vcs.getFileCreationInfo(blogSourceAbsolute);
if (result == null) {
try {
const result = await getFileCommitDate(blogSourceAbsolute, {
age: 'oldest',
includeAuthor: false,
});

return result.date;
} catch (err) {
logger.warn(err);
return (await fs.stat(blogSourceAbsolute)).birthtime;
}
return new Date(result.timestamp);
}

const date = await getDate();

@ -401,8 +406,6 @@ export async function generateBlogPosts(
ignore: exclude,
});

// TODO this should be done outside of this function
// directly in plugin loadContent()
const tagsFile = await getTagsFile({contentPaths, tags: options.tags});

async function doProcessBlogSourceFile(blogSourceFile: string) {

@ -17,6 +17,7 @@ import {
createAbsoluteFilePathMatcher,
getContentPathList,
getDataFilePath,
DEFAULT_PLUGIN_ID,
resolveMarkdownLinkPathname,
getLocaleConfig,
} from '@docusaurus/utils';

@ -24,6 +25,7 @@ import {getTagsFilePathsToWatch} from '@docusaurus/utils-validation';
import {createMDXLoaderItem} from '@docusaurus/mdx-loader';
import {
getBlogTags,
paginateBlogPosts,
shouldBeListed,
applyProcessBlogPosts,
generateBlogPosts,

@ -43,6 +45,7 @@ import type {
Assets,
BlogTags,
BlogContent,
BlogPaginated,
} from '@docusaurus/plugin-content-blog';
import type {RuleSetRule, RuleSetUseItem} from 'webpack';

@ -82,7 +85,7 @@ export default async function pluginContentBlog(
})
: undefined,
};
const pluginId = options.id;
const pluginId = options.id ?? DEFAULT_PLUGIN_ID;

const pluginDataDirRoot = path.join(generatedFilesDir, PluginName);
const dataDir = path.join(pluginDataDirRoot, pluginId);

@ -257,10 +260,9 @@ export default async function pluginContentBlog(

if (!blogPosts.length) {
return {
blogTitle,
blogDescription,
blogSidebarTitle,
blogPosts: [],
blogListPaginated: [],
blogTags: {},
blogTagsListPath,
authorsMap,

@ -289,9 +291,15 @@ export default async function pluginContentBlog(
}
});

// TODO this is not the correct place to aggregate and paginate tags
// for reasons similar to https://github.com/facebook/docusaurus/pull/11562
// What we should do here is only read the tags file (similar to authors)
const blogListPaginated: BlogPaginated[] = paginateBlogPosts({
blogPosts: listedBlogPosts,
blogTitle,
blogDescription,
postsPerPageOption,
basePageUrl: baseBlogUrl,
pageBasePath,
});

const blogTags: BlogTags = getBlogTags({
blogPosts,
postsPerPageOption,

@ -301,10 +309,9 @@ export default async function pluginContentBlog(
});

return {
blogTitle,
blogDescription,
blogSidebarTitle,
blogPosts,
blogListPaginated,
blogTags,
blogTagsListPath,
authorsMap,

@ -15,7 +15,7 @@ import {
RouteBasePathSchema,
URISchema,
} from '@docusaurus/utils-validation';
import {DEFAULT_PLUGIN_ID, GlobExcludeDefault} from '@docusaurus/utils';
import {GlobExcludeDefault} from '@docusaurus/utils';
import type {
PluginOptions,
Options,

@ -25,7 +25,6 @@ import type {
import type {OptionValidationContext} from '@docusaurus/types';

export const DEFAULT_OPTIONS: PluginOptions = {
id: DEFAULT_PLUGIN_ID,
feedOptions: {
type: ['rss', 'atom'],
copyright: '',

@ -431,7 +431,7 @@ declare module '@docusaurus/plugin-content-blog' {
export type PluginOptions = MDXOptions &
TagsPluginOptions & {
/** Plugin ID. */
id: string;
id?: string;
/**
* Path to the blog content directory on the file system, relative to site
* directory.

@ -583,10 +583,9 @@ declare module '@docusaurus/plugin-content-blog' {
export type AuthorsMap = {[authorKey: string]: AuthorWithKey};

export type BlogContent = {
blogTitle: string; // for translation purposes
blogDescription: string; // for translation purposes
blogSidebarTitle: string; // for translation purposes
blogSidebarTitle: string;
blogPosts: BlogPost[];
blogListPaginated: BlogPaginated[];
blogTags: BlogTags;
blogTagsListPath: string;
authorsMap?: AuthorsMap;

@ -67,24 +67,27 @@ export async function buildAllRoutes({
blogArchiveComponent,
routeBasePath,
archiveBasePath,
blogTitle,
authorsBasePath,
postsPerPage,
pageBasePath,
blogDescription,
} = options;
const pluginId = options.id;
const pluginId = options.id!;
const {createData} = actions;
const {
blogTitle,
blogDescription,
blogSidebarTitle,
blogPosts,
blogListPaginated,
blogTags,
blogTagsListPath,
authorsMap,
} = content;

const blogBasePath = normalizeUrl([baseUrl, routeBasePath]);
const authorsListPath = normalizeUrl([blogBasePath, authorsBasePath]);
const authorsListPath = normalizeUrl([
baseUrl,
routeBasePath,
authorsBasePath,
]);

const listedBlogPosts = blogPosts.filter(shouldBeListed);

@ -116,7 +119,7 @@ export async function buildAllRoutes({

async function createBlogMetadataModule() {
const blogMetadata: BlogMetadata = {
blogBasePath,
blogBasePath: normalizeUrl([baseUrl, routeBasePath]),
blogTitle,
authorsListPath,
};

@ -153,7 +156,7 @@ export async function buildAllRoutes({
if (archiveBasePath && listedBlogPosts.length) {
return [
{
path: normalizeUrl([blogBasePath, archiveBasePath]),
path: normalizeUrl([baseUrl, routeBasePath, archiveBasePath]),
component: blogArchiveComponent,
exact: true,
props: {

@ -207,15 +210,6 @@ export async function buildAllRoutes({
}

function createBlogPostsPaginatedRoutes(): RouteConfig[] {
const blogListPaginated = paginateBlogPosts({
blogPosts: listedBlogPosts,
blogTitle,
blogDescription,
postsPerPageOption: postsPerPage,
basePageUrl: blogBasePath,
pageBasePath,
});

return blogListPaginated.map((paginated) => {
return {
path: paginated.metadata.permalink,

@ -300,14 +294,12 @@ export async function buildAllRoutes({
sidebar: sidebarModulePath,
},
props: {
authors: authors.map((author) => {
const authorPosts = blogPostsByAuthorKey[author.key] ?? [];
const listedAuthorPosts = authorPosts.filter(shouldBeListed);
return toAuthorItemProp({
authors: authors.map((author) =>
toAuthorItemProp({
author,
count: listedAuthorPosts.length,
});
}),
count: blogPostsByAuthorKey[author.key]?.length ?? 0,
}),
),
},
context: {
blogMetadata: blogMetadataModulePath,

@ -317,17 +309,16 @@ export async function buildAllRoutes({

function createAuthorPaginatedRoute(author: AuthorWithKey): RouteConfig[] {
const authorBlogPosts = blogPostsByAuthorKey[author.key] ?? [];
const listedAuthorBlogPosts = authorBlogPosts.filter(shouldBeListed);
if (!author.page) {
return [];
}

const pages = paginateBlogPosts({
blogPosts: listedAuthorBlogPosts,
blogPosts: authorBlogPosts,
basePageUrl: author.page.permalink,
blogDescription,
blogTitle,
pageBasePath,
pageBasePath: authorsBasePath,
postsPerPageOption: postsPerPage,
});

@ -341,10 +332,7 @@ export async function buildAllRoutes({
sidebar: sidebarModulePath,
},
props: {
author: toAuthorItemProp({
author,
count: listedAuthorBlogPosts.length,
}),
author: toAuthorItemProp({author, count: authorBlogPosts.length}),
listMetadata: metadata,
},
context: {

@ -5,8 +5,30 @@
* LICENSE file in the root directory of this source tree.
*/

import type {TranslationFile} from '@docusaurus/types';
import type {PluginOptions, BlogContent} from '@docusaurus/plugin-content-blog';
import type {TranslationFileContent, TranslationFile} from '@docusaurus/types';
import type {
PluginOptions,
BlogContent,
BlogPaginated,
} from '@docusaurus/plugin-content-blog';

function translateListPage(
blogListPaginated: BlogPaginated[],
translations: TranslationFileContent,
) {
return blogListPaginated.map((page) => {
const {items, metadata} = page;
return {
items,
metadata: {
...metadata,
blogTitle: translations.title?.message ?? page.metadata.blogTitle,
blogDescription:
translations.description?.message ?? page.metadata.blogDescription,
},
};
});
}

export function getTranslationFiles(options: PluginOptions): TranslationFile[] {
return [

@ -34,13 +56,14 @@ export function translateContent(
content: BlogContent,
translationFiles: TranslationFile[],
): BlogContent {
const {content: translations} = translationFiles[0]!;
const {content: optionsTranslations} = translationFiles[0]!;
return {
...content,
blogTitle: translations.title?.message ?? content.blogTitle,
blogDescription:
translations.description?.message ?? content.blogDescription,
blogSidebarTitle:
translations['sidebar.title']?.message ?? content.blogSidebarTitle,
optionsTranslations['sidebar.title']?.message ?? content.blogSidebarTitle,
blogListPaginated: translateListPage(
content.blogListPaginated,
optionsTranslations,
),
};
}

@ -12,8 +12,8 @@ import {
createSlugger,
posixPath,
DEFAULT_PLUGIN_ID,
LAST_UPDATE_FALLBACK,
getLocaleConfig,
TEST_VCS,
} from '@docusaurus/utils';
import {getTagsFile} from '@docusaurus/utils-validation';
import {createSidebarsUtils} from '../sidebars/utils';

@ -529,8 +529,8 @@ describe('simple site', () => {
custom_edit_url: 'https://github.com/customUrl/docs/lorem.md',
unrelated_front_matter: "won't be part of metadata",
},
lastUpdatedAt: TEST_VCS.LAST_UPDATE_INFO.timestamp,
lastUpdatedBy: TEST_VCS.LAST_UPDATE_INFO.author,
lastUpdatedAt: LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdatedBy: LAST_UPDATE_FALLBACK.lastUpdatedBy,
tags: [],
unlisted: false,
});

@ -664,7 +664,7 @@ describe('simple site', () => {
},
title: 'Last Update Author Only',
},
lastUpdatedAt: TEST_VCS.LAST_UPDATE_INFO.timestamp,
lastUpdatedAt: LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdatedBy: 'Custom Author (processed by parseFrontMatter)',
sidebarPosition: undefined,
tags: [],

@ -568,28 +568,13 @@ describe('useSidebarBreadcrumbs', () => {

it('returns first level link', () => {
const pathname = '/somePathName';
const sidebar = [testCategory(), testLink({href: pathname, docId: 'doc1'})];
const sidebar = [testCategory(), testLink({href: pathname})];

expect(createUseSidebarBreadcrumbsMock(sidebar)(pathname)).toEqual([
sidebar[1],
]);
});

it('returns doc links only', () => {
const pathname = '/somePathName';

// A link that is not a doc link should not appear in the breadcrumbs
// See https://github.com/facebook/docusaurus/pull/11616
const nonDocLink = testLink({href: pathname});
const docLink = testLink({href: pathname, docId: 'doc1'});

const sidebar = [testCategory(), nonDocLink, docLink];

expect(createUseSidebarBreadcrumbsMock(sidebar)(pathname)).toEqual([
docLink,
]);
});

it('returns nested category', () => {
const pathname = '/somePathName';

@ -628,7 +613,7 @@ describe('useSidebarBreadcrumbs', () => {
it('returns nested link', () => {
const pathname = '/somePathName';

const link = testLink({href: pathname, docId: 'docNested'});
const link = testLink({href: pathname});

const categoryLevel3 = testCategory({
items: [testLink(), link, testLink()],

@ -672,35 +657,6 @@ describe('useSidebarBreadcrumbs', () => {
createUseSidebarBreadcrumbsMock(undefined, false)('/foo'),
).toBeNull();
});

// Regression test for https://github.com/facebook/docusaurus/issues/11612
it('returns the category that owns the URL, not a category with a link pointing to it', () => {
const categoryA: PropSidebarItemCategory = testCategory({
label: 'Category A',
href: '/category-a',
items: [
testLink({href: '/category-a/doc1', label: 'Doc 1'}),
testLink({href: '/category-a/doc2', label: 'Doc 2'}),
// This link points to Category B's generated-index
testLink({href: '/category-b', label: 'Go to Category B'}),
],
});

const categoryB: PropSidebarItemCategory = testCategory({
label: 'Category B',
href: '/category-b',
items: [
testLink({href: '/category-b/item1', label: 'Item 1'}),
testLink({href: '/category-b/item2', label: 'Item 2'}),
],
});

const sidebar: PropSidebar = [categoryA, categoryB];

expect(createUseSidebarBreadcrumbsMock(sidebar)('/category-b')).toEqual([
categoryB,
]);
});
});

describe('useCurrentSidebarCategory', () => {

@ -752,16 +708,12 @@ describe('useCurrentSidebarCategory', () => {
expect(mockUseCurrentSidebarCategory('/cat2')).toEqual(category2);
});

it('works for category doc link item', () => {
const pathname = '/my/link/path';
const nonDocLink = testLink({href: pathname});
const docLink = testLink({href: pathname, docId: 'doc1'});

it('works for category link item', () => {
const link = testLink({href: '/my/link/path'});
const category: PropSidebarItemCategory = testCategory({
href: '/cat1',
items: [testLink(), testLink(), nonDocLink, docLink, testCategory()],
items: [testLink(), testLink(), link, testCategory()],
});

const sidebar: PropSidebar = [
testLink(),
testLink(),

@ -772,28 +724,18 @@ describe('useCurrentSidebarCategory', () => {
const mockUseCurrentSidebarCategory =
createUseCurrentSidebarCategoryMock(sidebar);

expect(mockUseCurrentSidebarCategory(pathname)).toEqual(category);
expect(mockUseCurrentSidebarCategory('/my/link/path')).toEqual(category);
});

it('works for nested category link item', () => {
const pathname = '/my/link/path';
const nonDocLink = testLink({href: pathname});
const docLink = testLink({href: pathname, docId: 'doc1'});

const link = testLink({href: '/my/link/path'});
const category2: PropSidebarItemCategory = testCategory({
href: '/cat2',
items: [
testLink(),
testLink(),
testCategory({items: [nonDocLink]}),
nonDocLink,
docLink,
testCategory(),
],
items: [testLink(), testLink(), link, testCategory()],
});
const category1: PropSidebarItemCategory = testCategory({
href: '/cat1',
items: [testLink(), nonDocLink, testLink(), category2, testCategory()],
items: [testLink(), testLink(), category2, testCategory()],
});
const sidebar: PropSidebar = [
testLink(),

@ -838,38 +780,6 @@ describe('useCurrentSidebarCategory', () => {
`"Unexpected: cant find current sidebar in context"`,
);
});

// Regression test for https://github.com/facebook/docusaurus/issues/11612
it('returns the category that owns the URL, not a category with a link pointing to it', () => {
const categoryA: PropSidebarItemCategory = testCategory({
label: 'Category A',
href: '/category-a',
items: [
testLink({href: '/category-a/doc1', label: 'Doc 1'}),
testLink({href: '/category-a/doc2', label: 'Doc 2'}),
// This link points to Category B's generated-index
testLink({href: '/category-b', label: 'Go to Category B'}),
],
});

const categoryB: PropSidebarItemCategory = testCategory({
label: 'Category B',
href: '/category-b',
items: [
testLink({href: '/category-b/item1', label: 'Item 1'}),
testLink({href: '/category-b/item2', label: 'Item 2'}),
],
});

const sidebar: PropSidebar = [categoryA, categoryB];

const mockUseCurrentSidebarCategory =
createUseCurrentSidebarCategoryMock(sidebar);

// When visiting /category-b, we should get Category B (the owner),
// not Category A (which just has a link to it)
expect(mockUseCurrentSidebarCategory('/category-b')).toEqual(categoryB);
});
});

describe('useCurrentSidebarSiblings', () => {

@ -895,10 +805,10 @@ describe('useCurrentSidebarSiblings', () => {
testCategory(),
];

const mockUseCurrentSidebarSiblings =
const mockUseCurrentSidebarCategory =
createUseCurrentSidebarSiblingsMock(sidebar);

expect(mockUseCurrentSidebarSiblings('/cat')).toEqual(category.items);
expect(mockUseCurrentSidebarCategory('/cat')).toEqual(category.items);
});

it('works for sidebar root', () => {

@ -913,10 +823,10 @@ describe('useCurrentSidebarSiblings', () => {
testCategory(),
];

const mockUseCurrentSidebarSiblings =
const mockUseCurrentSidebarCategory =
createUseCurrentSidebarSiblingsMock(sidebar);

expect(mockUseCurrentSidebarSiblings('/rootLink')).toEqual(sidebar);
expect(mockUseCurrentSidebarCategory('/rootLink')).toEqual(sidebar);
});

it('works for nested sidebar category', () => {

@ -942,13 +852,10 @@ describe('useCurrentSidebarSiblings', () => {
});

it('works for category link item', () => {
const pathname = '/my/link/path';
const nonDocLink = testLink({href: pathname});
const docLink = testLink({href: pathname, docId: 'doc1'});

const link = testLink({href: '/my/link/path'});
const category: PropSidebarItemCategory = testCategory({
href: '/cat1',
items: [testLink(), testLink(), nonDocLink, docLink, testCategory()],
items: [testLink(), testLink(), link, testCategory()],
});
const sidebar: PropSidebar = [
testLink(),

@ -957,24 +864,23 @@ describe('useCurrentSidebarSiblings', () => {
testCategory(),
];

const mockUseCurrentSidebarSiblings =
const mockUseCurrentSidebarCategory =
createUseCurrentSidebarSiblingsMock(sidebar);

expect(mockUseCurrentSidebarSiblings(pathname)).toEqual(category.items);
expect(mockUseCurrentSidebarCategory('/my/link/path')).toEqual(
category.items,
);
});

it('works for nested category link item', () => {
const pathname = '/my/link/path';
const nonDocLink = testLink({href: pathname});
const docLink = testLink({href: pathname, docId: 'doc1'});

const link = testLink({href: '/my/link/path'});
const category2: PropSidebarItemCategory = testCategory({
href: '/cat2',
items: [testLink(), testLink(), nonDocLink, testCategory()],
items: [testLink(), testLink(), link, testCategory()],
});
const category1: PropSidebarItemCategory = testCategory({
href: '/cat1',
items: [testLink(), testLink(), category2, docLink, testCategory()],
items: [testLink(), testLink(), category2, testCategory()],
});
const sidebar: PropSidebar = [
testLink(),

@ -983,16 +889,18 @@ describe('useCurrentSidebarSiblings', () => {
testCategory(),
];

const mockUseCurrentSidebarSiblings =
const mockUseCurrentSidebarCategory =
createUseCurrentSidebarSiblingsMock(sidebar);

expect(mockUseCurrentSidebarSiblings(pathname)).toEqual(category1.items);
expect(mockUseCurrentSidebarCategory('/my/link/path')).toEqual(
category2.items,
);
});

it('throws when sidebar is missing', () => {
const mockUseCurrentSidebarSiblings = createUseCurrentSidebarSiblingsMock();
const mockUseCurrentSidebarCategory = createUseCurrentSidebarSiblingsMock();
expect(() =>
mockUseCurrentSidebarSiblings('/cat'),
mockUseCurrentSidebarCategory('/cat'),
).toThrowErrorMatchingInlineSnapshot(
`"Unexpected: cant find current sidebar in context"`,
);

@ -234,22 +234,15 @@ function getSidebarBreadcrumbs({
}): PropSidebarBreadcrumbsItem[] {
const breadcrumbs: PropSidebarBreadcrumbsItem[] = [];

function extract(items: PropSidebarItem[]): boolean {
function extract(items: PropSidebarItem[]) {
for (const item of items) {
// Extract category item
if (item.type === 'category') {
if (isSamePath(item.href, pathname) || extract(item.items)) {
breadcrumbs.unshift(item);
return true;
}
}
// Extract doc item
else if (
item.type === 'link' &&
item.docId &&
isSamePath(item.href, pathname)
if (
(item.type === 'category' &&
(isSamePath(item.href, pathname) || extract(item.items))) ||
(item.type === 'link' && isSamePath(item.href, pathname))
) {
if (!onlyCategories) {
const filtered = onlyCategories && item.type !== 'category';
if (!filtered) {
breadcrumbs.unshift(item);
}
return true;

@ -97,7 +97,6 @@ async function doProcessDocMetadata({
siteDir,
siteConfig: {
markdown: {parseFrontMatter},
future: {experimental_vcs: vcs},
},
} = context;

@ -126,7 +125,6 @@ async function doProcessDocMetadata({
filePath,
options,
lastUpdateFrontMatter,
vcs,
);

// E.g. api/plugins/myDoc -> myDoc; myDoc -> myDoc

@ -8,7 +8,6 @@
import * as path from 'path';
import {fromPartial} from '@total-typescript/shoehorn';
import {DEFAULT_PARSE_FRONT_MATTER} from '@docusaurus/utils/src';
import {DEFAULT_VCS_CONFIG} from '@docusaurus/utils';
import {readVersionsMetadata} from '../version';
import {DEFAULT_OPTIONS} from '../../options';
import {loadVersion} from '../loadVersion';

@ -38,9 +37,6 @@ async function siteFixture(fixture: string) {
markdown: {
parseFrontMatter: DEFAULT_PARSE_FRONT_MATTER,
},
future: {
experimental_vcs: DEFAULT_VCS_CONFIG,
},
},
});

@ -98,7 +98,6 @@ async function processPageSourceFile(
): Promise<Metadata | undefined> {
const {context, options, contentPaths} = params;
const {siteConfig, baseUrl, siteDir, i18n} = context;
const vcs = siteConfig.future.experimental_vcs;
const {editUrl} = options;

// Lookup in localized folder in priority

@ -181,7 +180,6 @@ async function processPageSourceFile(
source,
options,
frontMatter.last_update,
vcs,
);

if (isDraft({frontMatter})) {

@ -13,6 +13,7 @@ import {
addTrailingPathSeparator,
createAbsoluteFilePathMatcher,
getContentPathList,
DEFAULT_PLUGIN_ID,
} from '@docusaurus/utils';
import {createMDXLoaderRule} from '@docusaurus/mdx-loader';
import {createAllRoutes} from './routes';

@ -37,7 +38,7 @@ export default async function pluginContentPages(
generatedFilesDir,
'docusaurus-plugin-content-pages',
);
const dataDir = path.join(pluginDataDirRoot, options.id);
const dataDir = path.join(pluginDataDirRoot, options.id ?? DEFAULT_PLUGIN_ID);

async function createPagesMDXLoaderRule(): Promise<RuleSetRule> {
const {

@ -14,12 +14,11 @@ import {
RouteBasePathSchema,
URISchema,
} from '@docusaurus/utils-validation';
import {DEFAULT_PLUGIN_ID, GlobExcludeDefault} from '@docusaurus/utils';
import {GlobExcludeDefault} from '@docusaurus/utils';
import type {OptionValidationContext} from '@docusaurus/types';
import type {PluginOptions, Options} from '@docusaurus/plugin-content-pages';

export const DEFAULT_OPTIONS: PluginOptions = {
id: DEFAULT_PLUGIN_ID,
path: 'src/pages', // Path to data on filesystem, relative to site dir.
routeBasePath: '/', // URL Route.
include: ['**/*.{js,jsx,ts,tsx,md,mdx}'], // Extensions to include.

@ -19,7 +19,7 @@ declare module '@docusaurus/plugin-content-pages' {
};

export type PluginOptions = MDXOptions & {
id: string;
id?: string;
path: string;
routeBasePath: string;
include: string[];

@ -6,14 +6,12 @@
*/

import {fromPartial} from '@total-typescript/shoehorn';
import {DEFAULT_VCS_CONFIG} from '@docusaurus/utils';
import createSitemap from '../createSitemap';
import type {PluginOptions} from '../options';
import type {DocusaurusConfig, RouteConfig} from '@docusaurus/types';

const siteConfig: DocusaurusConfig = fromPartial({
url: 'https://example.com',
future: {experimental_vcs: DEFAULT_VCS_CONFIG},
});

const options: PluginOptions = {

@ -6,7 +6,6 @@
*/

import {fromPartial} from '@total-typescript/shoehorn';
import {TEST_VCS} from '@docusaurus/utils';
import {createSitemapItem} from '../createSitemapItem';
import {DEFAULT_OPTIONS} from '../options';
import type {PluginOptions} from '../options';

@ -14,7 +13,6 @@ import type {DocusaurusConfig, RouteConfig} from '@docusaurus/types';

const siteConfig: DocusaurusConfig = fromPartial({
url: 'https://example.com',
future: {experimental_vcs: TEST_VCS},
});

function test(params: {

@ -6,17 +6,16 @@
*/

import {applyTrailingSlash} from '@docusaurus/utils-common';
import {normalizeUrl} from '@docusaurus/utils';
import {getLastUpdate, normalizeUrl} from '@docusaurus/utils';
import type {LastModOption, SitemapItem} from './types';
import type {DocusaurusConfig, RouteConfig, VcsConfig} from '@docusaurus/types';
import type {DocusaurusConfig, RouteConfig} from '@docusaurus/types';
import type {PluginOptions} from './options';

async function getRouteLastUpdatedAt(
route: RouteConfig,
vcs: Pick<VcsConfig, 'getFileLastUpdateInfo'>,
): Promise<number | null | undefined> {
// Important to bail-out early here
// This can lead to duplicated VCS calls and performance problems
// This can lead to duplicated getLastUpdate() calls and performance problems
// See https://github.com/facebook/docusaurus/pull/11211
if (route.metadata?.lastUpdatedAt === null) {
return null;

@ -25,10 +24,8 @@ async function getRouteLastUpdatedAt(
return route.metadata?.lastUpdatedAt;
}
if (route.metadata?.sourceFilePath) {
const lastUpdateInfo = await vcs.getFileLastUpdateInfo(
route.metadata?.sourceFilePath,
);
return lastUpdateInfo?.timestamp ?? null;
const lastUpdate = await getLastUpdate(route.metadata?.sourceFilePath);
return lastUpdate?.lastUpdatedAt ?? null;
}

return undefined;

@ -49,16 +46,14 @@ function formatLastmod(timestamp: number, lastmodOption: LastModOption) {
async function getRouteLastmod({
route,
lastmod,
vcs,
}: {
route: RouteConfig;
lastmod: LastModOption | null;
vcs: Pick<VcsConfig, 'getFileLastUpdateInfo'>;
}): Promise<string | null> {
if (lastmod === null) {
return null;
}
const lastUpdatedAt = (await getRouteLastUpdatedAt(route, vcs)) ?? null;
const lastUpdatedAt = (await getRouteLastUpdatedAt(route)) ?? null;
return lastUpdatedAt ? formatLastmod(lastUpdatedAt, lastmod) : null;
}

@ -82,10 +77,6 @@ export async function createSitemapItem({
]),
changefreq,
priority,
lastmod: await getRouteLastmod({
route,
lastmod,
vcs: siteConfig.future.experimental_vcs,
}),
lastmod: await getRouteLastmod({route, lastmod}),
};
}

@ -14,23 +14,16 @@ import styles from './styles.module.css';

type Token = Props['line'][number];

// This <br/> seems useful when the line has no content to prevent collapsing.
// For code blocks with "diff" languages, this makes the empty lines collapse to
// zero height lines, which is undesirable.
// See also https://github.com/facebook/docusaurus/pull/11565
function LineBreak() {
return <br />;
}

// Replaces single lines with '\n' by '' so that we don't end up with
// duplicate line breaks (the '\n' + the artificial <br/> above)
// see also https://github.com/facebook/docusaurus/pull/11565
// Replaces '\n' by ''
// Historical code, not sure why we even need this :/
function fixLineBreak(line: Token[]) {
const singleLineBreakToken =
line.length === 1 && line[0]!.content === '\n' ? line[0] : undefined;

if (singleLineBreakToken) {
return [{...singleLineBreakToken, content: ''}];
}

return line;
}

@ -42,6 +35,7 @@ export default function CodeBlockLine({
getTokenProps,
}: Props): ReactNode {
const line = fixLineBreak(lineProp);

const lineProps = getLineProps({
line,
className: clsx(classNames, showLineNumbers && styles.codeLine),

@ -57,7 +51,7 @@
});

return (
<div {...lineProps}>
<span {...lineProps}>
{showLineNumbers ? (
<>
<span className={styles.codeLineNumber} />

@ -66,7 +60,7 @@
) : (
lineTokens
)}
<LineBreak />
</div>
<br />
</span>
);
}

@ -64,7 +64,7 @@ export default function DropdownNavbarItemDesktop({
{...props}
onClick={props.to ? undefined : (e) => e.preventDefault()}
onKeyDown={(e) => {
if (e.key === 'Enter' || e.key === ' ') {
if (e.key === 'Enter') {
e.preventDefault();
setShowDropdown(!showDropdown);
}

@ -49,6 +49,18 @@ export default async function themeMermaid(): Promise<Plugin<void>> {
),
}),
],

// Workaround for weird Rspack/SWC issue
// See https://github.com/facebook/docusaurus/issues/11430
resolve: {
alias: {
...(elkLayoutEnabled
? {}
: {
'@mermaid-js/layout-elk': false,
}),
},
},
};
},
};

@ -33,7 +33,7 @@
"copy:watch": "node ../../admin/scripts/copyUntypedFiles.js --watch"
},
"dependencies": {
"@docsearch/react": "^3.9.0 || ^4.3.2",
"@docsearch/react": "^3.9.0 || ^4.1.0",
"@docusaurus/core": "3.9.2",
"@docusaurus/logger": "3.9.2",
"@docusaurus/plugin-content-docs": "3.9.2",

@ -436,95 +436,5 @@ describe('validateThemeConfig', () => {
});
});
});

describe('Ask AI suggestedQuestions', () => {
it('accepts suggestedQuestions as true', () => {
const algolia = {
appId: 'BH4D9OD16A',
indexName: 'index',
apiKey: 'apiKey',
askAi: {
assistantId: 'my-assistant-id',
suggestedQuestions: true,
},
} satisfies AlgoliaInput;

expect(testValidateThemeConfig(algolia)).toEqual({
algolia: {
...DEFAULT_CONFIG,
...algolia,
askAi: {
indexName: algolia.indexName,
apiKey: algolia.apiKey,
appId: algolia.appId,
assistantId: 'my-assistant-id',
suggestedQuestions: true,
},
},
});
});

it('accepts suggestedQuestions as false', () => {
const algolia = {
appId: 'BH4D9OD16A',
indexName: 'index',
apiKey: 'apiKey',
askAi: {
assistantId: 'my-assistant-id',
suggestedQuestions: false,
},
} satisfies AlgoliaInput;

expect(testValidateThemeConfig(algolia)).toEqual({
algolia: {
...DEFAULT_CONFIG,
...algolia,
askAi: {
indexName: algolia.indexName,
apiKey: algolia.apiKey,
appId: algolia.appId,
assistantId: 'my-assistant-id',
suggestedQuestions: false,
},
},
});
});

it('rejects invalid suggestedQuestions type', () => {
const algolia: AlgoliaInput = {
appId: 'BH4D9OD16A',
indexName: 'index',
apiKey: 'apiKey',
askAi: {
assistantId: 'my-assistant-id',
// @ts-expect-error: expected type error
suggestedQuestions: 'invalid-string',
},
};
expect(() =>
testValidateThemeConfig(algolia),
).toThrowErrorMatchingInlineSnapshot(
`""algolia.askAi.suggestedQuestions" must be a boolean"`,
);
});

it('rejects suggestedQuestions as number', () => {
const algolia: AlgoliaInput = {
appId: 'BH4D9OD16A',
indexName: 'index',
apiKey: 'apiKey',
askAi: {
assistantId: 'my-assistant-id',
// @ts-expect-error: expected type error
suggestedQuestions: 123,
},
};
expect(() =>
testValidateThemeConfig(algolia),
).toThrowErrorMatchingInlineSnapshot(
`""algolia.askAi.suggestedQuestions" must be a boolean"`,
);
});
});
});
});

@ -91,7 +91,7 @@ export function useAlgoliaAskAi(props: DocSearchV4PropsLite): UseAskAiResult {
}, []);

const extraAskAiProps: UseAskAiResult['extraAskAiProps'] = {
askAi: askAi as any,
askAi,
canHandleAskAi,
isAskAiActive,
onAskAiToggle,

@ -43,7 +43,7 @@ export function useSearchResultUrlProcessor(): (url: string) => string {
}

// Otherwise => transform to relative URL for SPA navigation
const relativeUrl = `${parsedURL.pathname}${parsedURL.search}${parsedURL.hash}`;
const relativeUrl = `${parsedURL.pathname + parsedURL.hash}`;

return withBaseUrl(
replacePathname(relativeUrl, replaceSearchResultPathname),

@ -17,7 +17,6 @@ declare module '@docusaurus/theme-search-algolia' {
import type {FacetFilters} from 'algoliasearch/lite';

// The config after normalization (e.g. AskAI string -> object)
// This matches DocSearch v4.3+ AskAi configuration
export type AskAiConfig = {
indexName: string;
apiKey: string;

@ -26,7 +25,6 @@ declare module '@docusaurus/theme-search-algolia' {
searchParameters?: {
facetFilters?: FacetFilters;
};
suggestedQuestions?: boolean;
};

// DocSearch props that Docusaurus exposes directly through props forwarding

@ -61,7 +61,7 @@ type DocSearchProps = Omit<

// extend DocSearchProps for v4 features
// TODO Docusaurus v4: cleanup after we drop support for DocSearch v3
interface DocSearchV4Props extends Omit<DocSearchProps, 'askAi'> {
interface DocSearchV4Props extends DocSearchProps {
indexName: string;
askAi?: ThemeConfigAlgolia['askAi'];
translations?: DocSearchTranslations;

@ -199,7 +199,7 @@ function useSearchParameters({

function DocSearch({externalUrlRegex, ...props}: DocSearchV4Props) {
const navigator = useNavigator({externalUrlRegex});
const searchParameters = useSearchParameters({...props} as DocSearchProps);
const searchParameters = useSearchParameters({...props});
const transformItems = useTransformItems(props);
const transformSearchClient = useTransformSearchClient();

@ -301,7 +301,7 @@ function DocSearch({externalUrlRegex, ...props}: DocSearchV4Props) {
resultsFooterComponent,
})}
placeholder={currentPlaceholder}
{...(props as any)}
{...props}
translations={props.translations?.modal ?? translations.modal}
searchParameters={searchParameters}
{...extraAskAiProps}

@ -312,15 +312,9 @@ function DocSearch({externalUrlRegex, ...props}: DocSearchV4Props) {
);
}

export default function SearchBar(props: Partial<DocSearchV4Props>): ReactNode {
export default function SearchBar(): ReactNode {
const {siteConfig} = useDocusaurusContext();

const docSearchProps: DocSearchV4Props = {
...(siteConfig.themeConfig.algolia as DocSearchV4Props),
// Let props override theme config
// See https://github.com/facebook/docusaurus/pull/11581
...props,
};

return <DocSearch {...docSearchProps} />;
return (
<DocSearch {...(siteConfig.themeConfig.algolia as DocSearchV4Props)} />
);
}

@ -75,7 +75,6 @@ export const Schema = Joi.object<ThemeConfig>({
searchParameters: Joi.object({
facetFilters: FacetFiltersSchema.optional(),
}).optional(),
suggestedQuestions: Joi.boolean().optional(),
}),
)
.custom(

@ -1,60 +1,60 @@
{
"theme.SearchBar.label": "Procurar",
"theme.SearchBar.label": "Buscar",
"theme.SearchBar.seeAll": "Ver todos os {count} resultados",
"theme.SearchModal.askAiScreen.afterToolCallText": "Procurou por",
"theme.SearchModal.askAiScreen.copyButtonCopiedText": "Copiado!",
"theme.SearchModal.askAiScreen.copyButtonText": "Copiar",
"theme.SearchModal.askAiScreen.copyButtonTitle": "Copiar",
"theme.SearchModal.askAiScreen.disclaimerText": "Respostas geradas por IA podem cometer erros. Verifique.",
"theme.SearchModal.askAiScreen.afterToolCallText": "Searched for",
"theme.SearchModal.askAiScreen.copyButtonCopiedText": "Copied!",
"theme.SearchModal.askAiScreen.copyButtonText": "Copy",
"theme.SearchModal.askAiScreen.copyButtonTitle": "Copy",
"theme.SearchModal.askAiScreen.disclaimerText": "Answers are generated with AI which can make mistakes. Verify responses.",
"theme.SearchModal.askAiScreen.dislikeButtonTitle": "Dislike",
"theme.SearchModal.askAiScreen.duringToolCallText": "Procurando por ",
"theme.SearchModal.askAiScreen.duringToolCallText": "Searching for ",
"theme.SearchModal.askAiScreen.likeButtonTitle": "Like",
"theme.SearchModal.askAiScreen.preToolCallText": "Procurando...",
"theme.SearchModal.askAiScreen.relatedSourcesText": "Resultados relacionados",
"theme.SearchModal.askAiScreen.thanksForFeedbackText": "Obrigado pelo seu feedback!",
"theme.SearchModal.askAiScreen.thinkingText": "Pensando...",
"theme.SearchModal.askAiScreen.preToolCallText": "Searching...",
"theme.SearchModal.askAiScreen.relatedSourcesText": "Related sources",
"theme.SearchModal.askAiScreen.thanksForFeedbackText": "Thanks for your feedback!",
"theme.SearchModal.askAiScreen.thinkingText": "Thinking...",
"theme.SearchModal.errorScreen.helpText": "Talvez você deva verificar sua conexão de rede.",
"theme.SearchModal.errorScreen.titleText": "Não foi possível obter resultados",
"theme.SearchModal.footer.backToSearchText": "Voltar para pesquisa",
"theme.SearchModal.footer.backToSearchText": "Back to search",
"theme.SearchModal.footer.closeKeyAriaLabel": "Tecla Esc",
"theme.SearchModal.footer.closeText": "fechar",
"theme.SearchModal.footer.navigateDownKeyAriaLabel": "Seta para baixo",
"theme.SearchModal.footer.navigateText": "navegar",
"theme.SearchModal.footer.navigateUpKeyAriaLabel": "Seta para cima",
"theme.SearchModal.footer.searchByText": "Esta pesquisa utiliza",
"theme.SearchModal.footer.searchByText": "Esta busca utiliza",
"theme.SearchModal.footer.selectKeyAriaLabel": "Tecla Enter",
"theme.SearchModal.footer.selectText": "selecionar",
"theme.SearchModal.footer.submitQuestionText": "Enviar pergunta",
"theme.SearchModal.footer.submitQuestionText": "Submit question",
"theme.SearchModal.noResultsScreen.noResultsText": "Nenhum resultado para",
"theme.SearchModal.noResultsScreen.reportMissingResultsLinkText": "Nos avise.",
"theme.SearchModal.noResultsScreen.reportMissingResultsText": "Você acha que esta pesquisa deveria retornar resultados?",
"theme.SearchModal.noResultsScreen.suggestedQueryText": "Tente procurar por",
"theme.SearchModal.placeholder": "Procurar documentos",
"theme.SearchModal.resultsScreen.askAiPlaceholder": "Pergunte para a IA: ",
"theme.SearchModal.searchBox.backToKeywordSearchButtonAriaLabel": "Voltar para a pesquisa por palavra-chave",
"theme.SearchModal.searchBox.backToKeywordSearchButtonText": "Voltar para a pesquisa por palavra-chave",
"theme.SearchModal.noResultsScreen.reportMissingResultsText": "Você acha que esta busca deveria retornar resultados?",
"theme.SearchModal.noResultsScreen.suggestedQueryText": "Tente buscar por",
"theme.SearchModal.placeholder": "Buscar documentos",
"theme.SearchModal.resultsScreen.askAiPlaceholder": "Ask AI: ",
"theme.SearchModal.searchBox.backToKeywordSearchButtonAriaLabel": "Back to keyword search",
"theme.SearchModal.searchBox.backToKeywordSearchButtonText": "Back to keyword search",
"theme.SearchModal.searchBox.cancelButtonText": "Cancelar",
"theme.SearchModal.searchBox.enterKeyHint": "procurar",
"theme.SearchModal.searchBox.enterKeyHint": "search",
"theme.SearchModal.searchBox.enterKeyHintAskAi": "enter",
"theme.SearchModal.searchBox.placeholderText": "Procurar na documentação",
"theme.SearchModal.searchBox.placeholderTextAskAi": "Pergunte outra coisa...",
"theme.SearchModal.searchBox.placeholderTextAskAiStreaming": "Respondendo...",
"theme.SearchModal.searchBox.resetButtonTitle": "Limpar a pesquisa",
"theme.SearchModal.searchBox.searchInputLabel": "Procurar",
"theme.SearchModal.searchBox.placeholderText": "Search docs",
"theme.SearchModal.searchBox.placeholderTextAskAi": "Ask another question...",
"theme.SearchModal.searchBox.placeholderTextAskAiStreaming": "Answering...",
"theme.SearchModal.searchBox.resetButtonTitle": "Limpar a busca",
"theme.SearchModal.searchBox.searchInputLabel": "Search",
"theme.SearchModal.startScreen.favoriteSearchesTitle": "Favorito",
"theme.SearchModal.startScreen.noRecentSearchesText": "Nenhuma pesquisa recente",
"theme.SearchModal.startScreen.recentConversationsTitle": "Conversas recentes",
"theme.SearchModal.startScreen.noRecentSearchesText": "Nenhuma busca recente",
"theme.SearchModal.startScreen.recentConversationsTitle": "Recent conversations",
"theme.SearchModal.startScreen.recentSearchesTitle": "Recente",
"theme.SearchModal.startScreen.removeFavoriteSearchButtonTitle": "Remover esta pesquisa dos favoritos",
"theme.SearchModal.startScreen.removeFavoriteSearchButtonTitle": "Remover esta busca dos favoritos",
"theme.SearchModal.startScreen.removeRecentConversationButtonTitle": "Remove this conversation from history",
"theme.SearchModal.startScreen.removeRecentSearchButtonTitle": "Remover esta pesquisa do histórico",
"theme.SearchModal.startScreen.saveRecentSearchButtonTitle": "Salvar esta pesquisa",
"theme.SearchModal.startScreen.removeRecentSearchButtonTitle": "Remover esta busca do histórico",
"theme.SearchModal.startScreen.saveRecentSearchButtonTitle": "Salvar esta busca",
"theme.SearchPage.algoliaLabel": "Desenvolvido por Algolia",
"theme.SearchPage.documentsFound.plurals": "Um documento encontrado|{count} documentos encontrados",
"theme.SearchPage.emptyResultsTitle": "Pesquisa da documentação",
"theme.SearchPage.existingResultsTitle": "Resultado da pesquisa por \"{query}\"",
"theme.SearchPage.emptyResultsTitle": "Busca da documentação",
"theme.SearchPage.existingResultsTitle": "Resultado da busca por \"{query}\"",
"theme.SearchPage.fetchingNewResults": "Trazendo novos resultados...",
"theme.SearchPage.inputLabel": "Procurar",
"theme.SearchPage.inputPlaceholder": "Digite sua pesquisa aqui",
"theme.SearchPage.inputLabel": "Buscar",
"theme.SearchPage.inputPlaceholder": "Digite sua busca aqui",
"theme.SearchPage.noResultsText": "Nenhum resultado foi encontrado"
}
@@ -33,7 +33,6 @@ export type FasterConfig = {
  rspackBundler: boolean;
  rspackPersistentCache: boolean;
  ssgWorkerThreads: boolean;
  gitEagerVcs: boolean;
};

export type FutureV4Config = {
@@ -41,53 +40,6 @@ export type FutureV4Config = {
  useCssCascadeLayers: boolean;
};

// VCS (Version Control System) info about a given change, e.g., a git commit.
// The agnostic term "VCS" is used instead of "git" to acknowledge the existence
// of other version control systems, and external systems like CMSs and i18n
// translation SaaS (e.g., Crowdin)
export type VcsChangeInfo = {timestamp: number; author: string};

export type VscInitializeParams = {
  siteDir: string;
  // TODO could it be useful to provide all plugins getPathsToWatch() here?
  // this could give the opportunity to find out all VCS roots ahead of time
  // this is mostly useful for multi-git-repo setups, can be added later
};

// VCS (Version Control System) config hooks to get file change info.
// This lets you override and customize the default Docusaurus behavior.
// This can be useful to optimize calls, or when using something other than git
// See https://github.com/facebook/docusaurus/issues/11208
// See https://github.com/e18e/ecosystem-issues/issues/216
export type VcsConfig = {
  /**
   * Initialize the VCS system.
   * This is notably useful to pre-read eagerly a full Git repository so that
   * all the files' first/last update info can be retrieved efficiently later.
   *
   * Note: for now, this function is synchronous on purpose; it can be used to
   * start warming up the VCS by reading eagerly, but we don't want to delay
   * the rest of the Docusaurus start/build process. Instead of awaiting the
   * init promise, you can create/store it and await it later during reads.
   *
   * @param params Initialization params that can be useful to warm up the VCS
   */
  initialize: (params: VscInitializeParams) => void;
  getFileCreationInfo: (filePath: string) => Promise<VcsChangeInfo | null>;
  getFileLastUpdateInfo: (filePath: string) => Promise<VcsChangeInfo | null>;
};

/**
 * List of pre-built VcsConfig that Docusaurus provides.
 */
export type VcsPreset =
  | 'git-ad-hoc'
  | 'git-eager'
  | 'hardcoded'
  | 'disabled'
  | 'default-v1'
  | 'default-v2';

export type FutureConfig = {
  /**
   * Turns v4 future flags on
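
// A hedged illustration (a sketch, not part of the hunks above/below) of one
// way to implement the VcsConfig hooks. Only the VcsConfig, VcsChangeInfo, and
// VscInitializeParams shapes come from this diff; `preReadRepositorySketch`
// and the Map layout are hypothetical.
declare function preReadRepositorySketch(
  siteDir: string,
  sink: Map<string, {creation: VcsChangeInfo; lastUpdate: VcsChangeInfo}>,
): Promise<void>;

const fileInfoSketch = new Map<
  string,
  {creation: VcsChangeInfo; lastUpdate: VcsChangeInfo}
>();
let initPromiseSketch: Promise<void> | undefined;

const customVcsSketch: VcsConfig = {
  // Synchronous on purpose: store the promise, await it later during reads
  initialize: ({siteDir}) => {
    initPromiseSketch = preReadRepositorySketch(siteDir, fileInfoSketch);
  },
  getFileCreationInfo: async (filePath) => {
    await initPromiseSketch;
    return fileInfoSketch.get(filePath)?.creation ?? null;
  },
  getFileLastUpdateInfo: async (filePath) => {
    await initPromiseSketch;
    return fileInfoSketch.get(filePath)?.lastUpdate ?? null;
  },
};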
@@ -98,8 +50,6 @@ export type FutureConfig = {

  experimental_storage: StorageConfig;

  experimental_vcs: VcsConfig;

  /**
   * Docusaurus can work with 2 router types.
   *
@@ -417,7 +367,6 @@ export type Config = Overwrite<
    {
      v4?: boolean | Partial<FutureV4Config>;
      experimental_faster?: boolean | Partial<FasterConfig>;
      experimental_vcs?: VcsPreset | VcsConfig | boolean;
    }
  >;
}
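
// A hedged docusaurus.config.ts sketch wiring the `future` flags typed above.
// Field shapes come from the Config/FasterConfig/VcsPreset types in this diff;
// the concrete site values (title/url/baseUrl) are placeholders.
import type {Config} from '@docusaurus/types';

const configSketch: Config = {
  title: 'My Site', // placeholder
  url: 'https://example.com', // placeholder
  baseUrl: '/',
  future: {
    v4: true,
    experimental_faster: {rspackBundler: true, gitEagerVcs: true},
    // Accepts a preset name ('git-eager', ...), a full VcsConfig, or a boolean
    experimental_vcs: 'git-eager',
  },
};

export default configSketch;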
@@ -13,10 +13,6 @@
  FutureV4Config,
  FasterConfig,
  StorageConfig,
  VcsConfig,
  VcsPreset,
  VcsChangeInfo,
  VscInitializeParams,
  Config,
} from './config';
@@ -104,8 +104,6 @@ export type HtmlTagObject = {
  tagName: string;
  /** The inner HTML */
  innerHTML?: string;
  /** Allow custom html elements, e.g. `<custom-element>` */
  customElement?: boolean;
};

export type HtmlTags = string | HtmlTagObject | (string | HtmlTagObject)[];
@@ -22,7 +22,7 @@
    "@docusaurus/types": "3.9.2",
    "@docusaurus/utils-common": "3.9.2",
    "escape-string-regexp": "^4.0.0",
    "execa": "^5.1.1",
    "execa": "5.1.1",
    "file-loader": "^6.2.0",
    "fs-extra": "^11.1.1",
    "github-slugger": "^1.5.0",
packages/docusaurus-utils/src/__tests__/__fixtures__/simple-site/doc with space.md (generated, new file)
@@ -0,0 +1 @@
# Hoo hoo, if this path tricks you...

packages/docusaurus-utils/src/__tests__/__fixtures__/simple-site/hello.md (generated, new file)
@@ -0,0 +1,7 @@
---
id: hello
title: Hello, World !
slug: /
---

Hello
@@ -0,0 +1,159 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import fs from 'fs-extra';
import path from 'path';
import {createTempRepo} from '@testing-utils/git';
import {FileNotTrackedError, getFileCommitDate} from '../gitUtils';
import {getGitLastUpdate} from '../lastUpdateUtils';

/* eslint-disable no-restricted-properties */
function initializeTempRepo() {
  const {repoDir, git} = createTempRepo();

  fs.writeFileSync(path.join(repoDir, 'test.txt'), 'Some content');
  git.commit(
    'Create test.txt',
    '2020-06-19',
    'Caroline <caroline@jc-verse.com>',
  );
  fs.writeFileSync(path.join(repoDir, 'test.txt'), 'Updated content');
  git.commit(
    'Update test.txt',
    '2020-06-20',
    'Josh-Cena <josh-cena@jc-verse.com>',
  );
  fs.writeFileSync(path.join(repoDir, 'test.txt'), 'Updated content (2)');
  fs.writeFileSync(path.join(repoDir, 'moved.txt'), 'This file is moved');
  git.commit(
    'Update test.txt again, create moved.txt',
    '2020-09-13',
    'Caroline <caroline@jc-verse.com>',
  );
  fs.moveSync(path.join(repoDir, 'moved.txt'), path.join(repoDir, 'dest.txt'));
  git.commit(
    'Rename moved.txt to dest.txt',
    '2020-11-13',
    'Josh-Cena <josh-cena@jc-verse.com>',
  );
  fs.writeFileSync(path.join(repoDir, 'untracked.txt'), "I'm untracked");

  return repoDir;
}

describe('getFileCommitDate', () => {
  const repoDir = initializeTempRepo();
  it('returns earliest commit date', async () => {
    await expect(
      getFileCommitDate(path.join(repoDir, 'test.txt'), {}),
    ).resolves.toEqual({
      date: new Date('2020-06-19'),
      timestamp: new Date('2020-06-19').getTime(),
    });
    await expect(
      getFileCommitDate(path.join(repoDir, 'dest.txt'), {}),
    ).resolves.toEqual({
      date: new Date('2020-09-13'),
      timestamp: new Date('2020-09-13').getTime(),
    });
  });
  it('returns latest commit date', async () => {
    await expect(
      getFileCommitDate(path.join(repoDir, 'test.txt'), {age: 'newest'}),
    ).resolves.toEqual({
      date: new Date('2020-09-13'),
      timestamp: new Date('2020-09-13').getTime(),
    });
    await expect(
      getFileCommitDate(path.join(repoDir, 'dest.txt'), {age: 'newest'}),
    ).resolves.toEqual({
      date: new Date('2020-11-13'),
      timestamp: new Date('2020-11-13').getTime(),
    });
  });
  it('returns earliest commit date with author', async () => {
    await expect(
      getFileCommitDate(path.join(repoDir, 'test.txt'), {
        age: 'oldest',
        includeAuthor: true,
      }),
    ).resolves.toEqual({
      date: new Date('2020-06-19'),
      timestamp: new Date('2020-06-19').getTime(),
      author: 'Caroline',
    });
    await expect(
      getFileCommitDate(path.join(repoDir, 'dest.txt'), {
        age: 'oldest',
        includeAuthor: true,
      }),
    ).resolves.toEqual({
      date: new Date('2020-09-13'),
      timestamp: new Date('2020-09-13').getTime(),
      author: 'Caroline',
    });
  });
  it('returns latest commit date with author', async () => {
    await expect(
      getFileCommitDate(path.join(repoDir, 'test.txt'), {
        age: 'newest',
        includeAuthor: true,
      }),
    ).resolves.toEqual({
      date: new Date('2020-09-13'),
      timestamp: new Date('2020-09-13').getTime(),
      author: 'Caroline',
    });
    await expect(
      getFileCommitDate(path.join(repoDir, 'dest.txt'), {
        age: 'newest',
        includeAuthor: true,
      }),
    ).resolves.toEqual({
      date: new Date('2020-11-13'),
      timestamp: new Date('2020-11-13').getTime(),
      author: 'Josh-Cena',
    });
  });
  it('throws custom error when file is not tracked', async () => {
    await expect(() =>
      getFileCommitDate(path.join(repoDir, 'untracked.txt'), {
        age: 'newest',
        includeAuthor: true,
      }),
    ).rejects.toThrow(FileNotTrackedError);
  });
  it('throws when file not found', async () => {
    await expect(() =>
      getFileCommitDate(path.join(repoDir, 'nonexistent.txt'), {
        age: 'newest',
        includeAuthor: true,
      }),
    ).rejects.toThrow(
      /Failed to retrieve git history for ".*nonexistent.txt" because the file does not exist./,
    );
  });

  it('multiple files not tracked by git', async () => {
    const consoleMock = jest
      .spyOn(console, 'warn')
      .mockImplementation(() => {});
    const tempFilePath1 = path.join(repoDir, 'file1.md');
    const tempFilePath2 = path.join(repoDir, 'file2.md');
    await fs.writeFile(tempFilePath1, 'Lorem ipsum :)');
    await fs.writeFile(tempFilePath2, 'Lorem ipsum :)');
    // TODO this is not the correct place to test "getGitLastUpdate"
    await expect(getGitLastUpdate(tempFilePath1)).resolves.toBeNull();
    await expect(getGitLastUpdate(tempFilePath2)).resolves.toBeNull();
    expect(consoleMock).toHaveBeenCalledTimes(1);
    expect(consoleMock).toHaveBeenLastCalledWith(
      expect.stringMatching(/not tracked by git./),
    );
    await fs.unlink(tempFilePath1);
    await fs.unlink(tempFilePath2);
  });
});
@@ -5,85 +5,162 @@
 * LICENSE file in the root directory of this source tree.
 */

import {readLastUpdateData} from '../lastUpdateUtils';
import {TEST_VCS} from '../vcs/vcs';
import {jest} from '@jest/globals';
import fs from 'fs-extra';
import path from 'path';
import {createTempRepo} from '@testing-utils/git';
import execa from 'execa';

import {
  getGitLastUpdate,
  LAST_UPDATE_FALLBACK,
  LAST_UPDATE_UNTRACKED_GIT_FILEPATH,
  readLastUpdateData,
} from '../lastUpdateUtils';
import type {FrontMatterLastUpdate} from '../lastUpdateUtils';

describe('getGitLastUpdate', () => {
  const {repoDir} = createTempRepo();

  const existingFilePath = path.join(
    __dirname,
    '__fixtures__/simple-site/hello.md',
  );
  it('existing test file in repository with Git timestamp', async () => {
    const lastUpdateData = await getGitLastUpdate(existingFilePath);
    expect(lastUpdateData).not.toBeNull();

    const {lastUpdatedAt, lastUpdatedBy} = lastUpdateData!;
    expect(lastUpdatedBy).not.toBeNull();
    expect(typeof lastUpdatedBy).toBe('string');

    expect(lastUpdatedAt).not.toBeNull();
    expect(typeof lastUpdatedAt).toBe('number');
  });

  it('existing test file with spaces in path', async () => {
    const filePathWithSpace = path.join(
      __dirname,
      '__fixtures__/simple-site/doc with space.md',
    );
    const lastUpdateData = await getGitLastUpdate(filePathWithSpace);
    expect(lastUpdateData).not.toBeNull();

    const {lastUpdatedBy, lastUpdatedAt} = lastUpdateData!;
    expect(lastUpdatedBy).not.toBeNull();
    expect(typeof lastUpdatedBy).toBe('string');

    expect(lastUpdatedAt).not.toBeNull();
    expect(typeof lastUpdatedAt).toBe('number');
  });

  it('non-existing file', async () => {
    const consoleMock = jest
      .spyOn(console, 'warn')
      .mockImplementation(() => {});
    const nonExistingFileName = '.nonExisting';
    const nonExistingFilePath = path.join(
      __dirname,
      '__fixtures__',
      nonExistingFileName,
    );
    await expect(getGitLastUpdate(nonExistingFilePath)).rejects.toThrow(
      /An error occurred when trying to get the last update date/,
    );
    expect(consoleMock).toHaveBeenCalledTimes(0);
    consoleMock.mockRestore();
  });

  it('git does not exist', async () => {
    const mock = jest.spyOn(execa, 'sync').mockImplementationOnce(() => {
      throw new Error('Git does not exist');
    });

    const consoleMock = jest
      .spyOn(console, 'warn')
      .mockImplementation(() => {});
    const lastUpdateData = await getGitLastUpdate(existingFilePath);
    expect(lastUpdateData).toBeNull();
    expect(consoleMock).toHaveBeenLastCalledWith(
      expect.stringMatching(
        /.*\[WARNING\].* Sorry, the last update options require Git\..*/,
      ),
    );

    consoleMock.mockRestore();
    mock.mockRestore();
  });

  it('temporary created file that is not tracked by git', async () => {
    const consoleMock = jest
      .spyOn(console, 'warn')
      .mockImplementation(() => {});
    const tempFilePath = path.join(repoDir, 'file.md');
    await fs.writeFile(tempFilePath, 'Lorem ipsum :)');
    await expect(getGitLastUpdate(tempFilePath)).resolves.toBeNull();
    expect(consoleMock).toHaveBeenCalledTimes(1);
    expect(consoleMock).toHaveBeenLastCalledWith(
      expect.stringMatching(/not tracked by git./),
    );
    await fs.unlink(tempFilePath);
  });
});

describe('readLastUpdateData', () => {
  const testDate = '2021-01-01';
  const testTimestamp = new Date(testDate).getTime();
  const testAuthor = 'ozaki';

  async function readData(
    filePath: string,
    options: Parameters<typeof readLastUpdateData>[1],
    lastUpdateFrontMatter: Parameters<typeof readLastUpdateData>[2],
  ) {
    return readLastUpdateData(
      filePath,
      options,
      lastUpdateFrontMatter,
      TEST_VCS,
    );
  }

  describe('on untracked Git file', () => {
    function readUntrackedFile(
      lastUpdateFrontMatter: FrontMatterLastUpdate | undefined,
    ) {
      return readData(
        TEST_VCS.UNTRACKED_FILE_PATH,
    function test(lastUpdateFrontMatter: FrontMatterLastUpdate | undefined) {
      return readLastUpdateData(
        LAST_UPDATE_UNTRACKED_GIT_FILEPATH,
        {showLastUpdateAuthor: true, showLastUpdateTime: true},
        lastUpdateFrontMatter,
      );
    }

    it('reads null at/by from Git', async () => {
      const {lastUpdatedAt, lastUpdatedBy} = await readUntrackedFile({});
      const {lastUpdatedAt, lastUpdatedBy} = await test({});
      expect(lastUpdatedAt).toBeNull();
      expect(lastUpdatedBy).toBeNull();
    });

    it('reads null at from Git and author from front matter', async () => {
      const {lastUpdatedAt, lastUpdatedBy} = await readUntrackedFile({
        author: testAuthor,
      });
      const {lastUpdatedAt, lastUpdatedBy} = await test({author: testAuthor});
      expect(lastUpdatedAt).toBeNull();
      expect(lastUpdatedBy).toEqual(testAuthor);
    });

    it('reads null by from Git and date from front matter', async () => {
      const {lastUpdatedAt, lastUpdatedBy} = await readUntrackedFile({
        date: testDate,
      });
      const {lastUpdatedAt, lastUpdatedBy} = await test({date: testDate});
      expect(lastUpdatedBy).toBeNull();
      expect(lastUpdatedAt).toEqual(testTimestamp);
    });
  });

  it('read last time show author time', async () => {
    const {lastUpdatedAt, lastUpdatedBy} = await readData(
    const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
      '',
      {showLastUpdateAuthor: true, showLastUpdateTime: true},
      {date: testDate},
    );
    expect(lastUpdatedAt).toEqual(testTimestamp);
    expect(lastUpdatedBy).toBe(TEST_VCS.LAST_UPDATE_INFO.author);
    expect(lastUpdatedBy).toBe(LAST_UPDATE_FALLBACK.lastUpdatedBy);
  });

  it('read last author show author time', async () => {
    const {lastUpdatedAt, lastUpdatedBy} = await readData(
    const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
      '',
      {showLastUpdateAuthor: true, showLastUpdateTime: true},
      {author: testAuthor},
    );
    expect(lastUpdatedBy).toEqual(testAuthor);
    expect(lastUpdatedAt).toBe(TEST_VCS.LAST_UPDATE_INFO.timestamp);
    expect(lastUpdatedAt).toBe(LAST_UPDATE_FALLBACK.lastUpdatedAt);
  });

  it('read last all show author time', async () => {
    const {lastUpdatedAt, lastUpdatedBy} = await readData(
    const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
      '',
      {showLastUpdateAuthor: true, showLastUpdateTime: true},
      {author: testAuthor, date: testDate},
@@ -93,7 +170,7 @@ describe('readLastUpdateData', () => {
  });

  it('read last default show none', async () => {
    const lastUpdate = await readData(
    const lastUpdate = await readLastUpdateData(
      '',
      {showLastUpdateAuthor: false, showLastUpdateTime: false},
      {},
@@ -102,7 +179,7 @@ describe('readLastUpdateData', () => {
  });

  it('read last author show none', async () => {
    const lastUpdate = await readData(
    const lastUpdate = await readLastUpdateData(
      '',
      {showLastUpdateAuthor: false, showLastUpdateTime: false},
      {author: testAuthor},
@@ -111,17 +188,17 @@ describe('readLastUpdateData', () => {
  });

  it('read last time show author', async () => {
    const {lastUpdatedAt, lastUpdatedBy} = await readData(
    const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
      '',
      {showLastUpdateAuthor: true, showLastUpdateTime: false},
      {date: testDate},
    );
    expect(lastUpdatedBy).toBe(TEST_VCS.LAST_UPDATE_INFO.author);
    expect(lastUpdatedBy).toBe(LAST_UPDATE_FALLBACK.lastUpdatedBy);
    expect(lastUpdatedAt).toBeUndefined();
  });

  it('read last author show author', async () => {
    const {lastUpdatedAt, lastUpdatedBy} = await readData(
    const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
      '',
      {showLastUpdateAuthor: true, showLastUpdateTime: false},
      {author: testAuthor},
@@ -131,17 +208,17 @@ describe('readLastUpdateData', () => {
  });

  it('read last default show author default', async () => {
    const {lastUpdatedAt, lastUpdatedBy} = await readData(
    const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
      '',
      {showLastUpdateAuthor: true, showLastUpdateTime: false},
      {},
    );
    expect(lastUpdatedBy).toBe(TEST_VCS.LAST_UPDATE_INFO.author);
    expect(lastUpdatedBy).toBe(LAST_UPDATE_FALLBACK.lastUpdatedBy);
    expect(lastUpdatedAt).toBeUndefined();
  });

  it('read last time show time', async () => {
    const {lastUpdatedAt, lastUpdatedBy} = await readData(
    const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
      '',
      {showLastUpdateAuthor: false, showLastUpdateTime: true},
      {date: testDate},
@@ -151,17 +228,17 @@ describe('readLastUpdateData', () => {
  });

  it('read last author show time', async () => {
    const {lastUpdatedAt, lastUpdatedBy} = await readData(
    const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
      '',
      {showLastUpdateAuthor: false, showLastUpdateTime: true},
      {author: testAuthor},
    );
    expect(lastUpdatedBy).toBeUndefined();
    expect(lastUpdatedAt).toEqual(TEST_VCS.LAST_UPDATE_INFO.timestamp);
    expect(lastUpdatedAt).toEqual(LAST_UPDATE_FALLBACK.lastUpdatedAt);
  });

  it('read last author show time only - both front matter', async () => {
    const {lastUpdatedAt, lastUpdatedBy} = await readData(
    const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
      '',
      {showLastUpdateAuthor: false, showLastUpdateTime: true},
      {author: testAuthor, date: testDate},
@@ -171,7 +248,7 @@ describe('readLastUpdateData', () => {
  });

  it('read last author show author only - both front matter', async () => {
    const {lastUpdatedAt, lastUpdatedBy} = await readData(
    const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
      '',
      {showLastUpdateAuthor: true, showLastUpdateTime: false},
      {author: testAuthor, date: testDate},
@@ -0,0 +1,200 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import path from 'path';
import fs from 'fs-extra';
import os from 'os';
import _ from 'lodash';
import execa from 'execa';
import PQueue from 'p-queue';

// Quite high/conservative concurrency value (it was previously "Infinity")
// See https://github.com/facebook/docusaurus/pull/10915
const DefaultGitCommandConcurrency =
  // TODO Docusaurus v4: bump node, availableParallelism() now always exists
  (typeof os.availableParallelism === 'function'
    ? os.availableParallelism()
    : os.cpus().length) * 4;

const GitCommandConcurrencyEnv = process.env.DOCUSAURUS_GIT_COMMAND_CONCURRENCY
  ? parseInt(process.env.DOCUSAURUS_GIT_COMMAND_CONCURRENCY, 10)
  : undefined;

const GitCommandConcurrency =
  GitCommandConcurrencyEnv && GitCommandConcurrencyEnv > 0
    ? GitCommandConcurrencyEnv
    : DefaultGitCommandConcurrency;

// We use a queue to avoid running too many concurrent Git commands at once
// See https://github.com/facebook/docusaurus/issues/10348
const GitCommandQueue = new PQueue({
  concurrency: GitCommandConcurrency,
});

const realHasGitFn = () => {
  try {
    return execa.sync('git', ['--version']).exitCode === 0;
  } catch (error) {
    return false;
  }
};

// The hasGit call is synchronous IO so we memoize it
// The user won't install Git in the middle of a build anyway...
const hasGit =
  process.env.NODE_ENV === 'test' ? realHasGitFn : _.memoize(realHasGitFn);

/** Custom error thrown when git is not found in `PATH`. */
export class GitNotFoundError extends Error {}

/** Custom error thrown when the current file is not tracked by git. */
export class FileNotTrackedError extends Error {}

/**
 * Fetches the git history of a file and returns a relevant commit date.
 * It gets the commit date instead of author date so that amended commits
 * can have their dates updated.
 *
 * @throws {@link GitNotFoundError} If git is not found in `PATH`.
 * @throws {@link FileNotTrackedError} If the current file is not tracked by git.
 * @throws Also throws when `git log` exited with non-zero, or when it outputs
 * unexpected text.
 */
export async function getFileCommitDate(
  /** Absolute path to the file. */
  file: string,
  args: {
    /**
     * `"oldest"` is the commit that added the file, following renames;
     * `"newest"` is the last commit that edited the file.
     */
    age?: 'oldest' | 'newest';
    /** Use `includeAuthor: true` to get the author information as well. */
    includeAuthor?: false;
  },
): Promise<{
  /** Relevant commit date. */
  date: Date;
  /** Timestamp returned from git, converted to **milliseconds**. */
  timestamp: number;
}>;
/**
 * Fetches the git history of a file and returns a relevant commit date.
 * It gets the commit date instead of author date so that amended commits
 * can have their dates updated.
 *
 * @throws {@link GitNotFoundError} If git is not found in `PATH`.
 * @throws {@link FileNotTrackedError} If the current file is not tracked by git.
 * @throws Also throws when `git log` exited with non-zero, or when it outputs
 * unexpected text.
 */
export async function getFileCommitDate(
  /** Absolute path to the file. */
  file: string,
  args: {
    /**
     * `"oldest"` is the commit that added the file, following renames;
     * `"newest"` is the last commit that edited the file.
     */
    age?: 'oldest' | 'newest';
    includeAuthor: true;
  },
): Promise<{
  /** Relevant commit date. */
  date: Date;
  /** Timestamp returned from git, converted to **milliseconds**. */
  timestamp: number;
  /** The author's name, as returned from git. */
  author: string;
}>;

export async function getFileCommitDate(
  file: string,
  {
    age = 'oldest',
    includeAuthor = false,
  }: {
    age?: 'oldest' | 'newest';
    includeAuthor?: boolean;
  },
): Promise<{
  date: Date;
  timestamp: number;
  author?: string;
}> {
  if (!hasGit()) {
    throw new GitNotFoundError(
      `Failed to retrieve git history for "${file}" because git is not installed.`,
    );
  }

  if (!(await fs.pathExists(file))) {
    throw new Error(
      `Failed to retrieve git history for "${file}" because the file does not exist.`,
    );
  }

  // We add a "RESULT:" prefix to make parsing easier
  // See why: https://github.com/facebook/docusaurus/pull/10022
  const resultFormat = includeAuthor ? 'RESULT:%ct,%an' : 'RESULT:%ct';

  const args = [
    `--format=${resultFormat}`,
    '--max-count=1',
    age === 'oldest' ? '--follow --diff-filter=A' : undefined,
  ]
    .filter(Boolean)
    .join(' ');

  const command = `git -c log.showSignature=false log ${args} -- "${path.basename(
    file,
  )}"`;

  const result = (await GitCommandQueue.add(() => {
    return execa(command, {
      cwd: path.dirname(file),
      shell: true,
    });
  }))!;

  if (result.exitCode !== 0) {
    throw new Error(
      `Failed to retrieve the git history for file "${file}" with exit code ${result.exitCode}: ${result.stderr}`,
    );
  }

  // We only parse the output line starting with our "RESULT:" prefix
  // See why https://github.com/facebook/docusaurus/pull/10022
  const regex = includeAuthor
    ? /(?:^|\n)RESULT:(?<timestamp>\d+),(?<author>.+)(?:$|\n)/
    : /(?:^|\n)RESULT:(?<timestamp>\d+)(?:$|\n)/;

  const output = result.stdout.trim();

  if (!output) {
    throw new FileNotTrackedError(
      `Failed to retrieve the git history for file "${file}" because the file is not tracked by git.`,
    );
  }

  const match = output.match(regex);

  if (!match) {
    throw new Error(
      `Failed to retrieve the git history for file "${file}" with unexpected output: ${output}`,
    );
  }

  const timestampInSeconds = Number(match.groups!.timestamp);
  const timestamp = timestampInSeconds * 1_000;
  const date = new Date(timestamp);

  if (includeAuthor) {
    return {date, timestamp, author: match.groups!.author!};
  }
  return {date, timestamp};
}
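
// A hedged usage sketch for the two getFileCommitDate overloads above;
// the file argument is whatever absolute path the caller has at hand.
import {
  FileNotTrackedError,
  GitNotFoundError,
  getFileCommitDate,
} from './gitUtils';

async function printGitDatesSketch(file: string): Promise<void> {
  try {
    // First overload: no author, just the date and millisecond timestamp
    const created = await getFileCommitDate(file, {age: 'oldest'});
    // Second overload: includeAuthor: true also returns the author name
    const updated = await getFileCommitDate(file, {
      age: 'newest',
      includeAuthor: true,
    });
    console.log(`created ${created.date.toISOString()}`);
    console.log(`updated ${updated.date.toISOString()} by ${updated.author}`);
  } catch (err) {
    if (err instanceof GitNotFoundError) {
      console.warn('git is not installed');
    } else if (err instanceof FileNotTrackedError) {
      console.warn(`"${file}" is not tracked by git`);
    } else {
      throw err;
    }
  }
}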
@@ -25,12 +25,10 @@ export {
} from './constants';
export {generate, readOutputHTMLFile} from './emitUtils';
export {
  // TODO Docusaurus v4: remove these legacy exports,
  // they are only kept for retro-compatibility
  getFileCommitDate,
  FileNotTrackedError,
  GitNotFoundError,
} from './vcs/gitUtils';
} from './gitUtils';
export {
  mergeTranslations,
  updateTranslationFileMessages,
@@ -123,11 +121,12 @@ export {askPreferredLanguage} from './cliUtils';
export {flattenRoutes} from './routeUtils';

export {
  getGitLastUpdate,
  getLastUpdate,
  readLastUpdateData,
  LAST_UPDATE_FALLBACK,
  type LastUpdateData,
  type FrontMatterLastUpdate,
} from './lastUpdateUtils';

export {VcsPresetNames, getVcsPreset, TEST_VCS} from './vcs/vcs';

export {normalizeTags, reportInlineTags} from './tags';
@@ -6,9 +6,13 @@
 */

import _ from 'lodash';
import {getVcsPreset} from './vcs/vcs';

import type {PluginOptions, VcsConfig} from '@docusaurus/types';
import logger from '@docusaurus/logger';
import {
  FileNotTrackedError,
  GitNotFoundError,
  getFileCommitDate,
} from './gitUtils';
import type {PluginOptions} from '@docusaurus/types';

export type LastUpdateData = {
  /**
@@ -25,6 +29,72 @@ export type LastUpdateData = {
  lastUpdatedBy: string | undefined | null;
};

let showedGitRequirementError = false;
let showedFileNotTrackedError = false;

export async function getGitLastUpdate(
  filePath: string,
): Promise<LastUpdateData | null> {
  if (!filePath) {
    return null;
  }

  // Wrap in try/catch in case the shell commands fail
  // (e.g. project doesn't use Git, etc).
  try {
    const result = await getFileCommitDate(filePath, {
      age: 'newest',
      includeAuthor: true,
    });

    return {lastUpdatedAt: result.timestamp, lastUpdatedBy: result.author};
  } catch (err) {
    if (err instanceof GitNotFoundError) {
      if (!showedGitRequirementError) {
        logger.warn('Sorry, the last update options require Git.');
        showedGitRequirementError = true;
      }
    } else if (err instanceof FileNotTrackedError) {
      if (!showedFileNotTrackedError) {
        logger.warn(
          'Cannot infer the update date for some files, as they are not tracked by git.',
        );
        showedFileNotTrackedError = true;
      }
    } else {
      throw new Error(
        `An error occurred when trying to get the last update date`,
        {cause: err},
      );
    }
    return null;
  }
}

export const LAST_UPDATE_FALLBACK: LastUpdateData = {
  lastUpdatedAt: 1539502055000,
  lastUpdatedBy: 'Author',
};

// Not proud of this, but convenient for tests :/
export const LAST_UPDATE_UNTRACKED_GIT_FILEPATH = `file/path/${Math.random()}.mdx`;

export async function getLastUpdate(
  filePath: string,
): Promise<LastUpdateData | null> {
  if (filePath === LAST_UPDATE_UNTRACKED_GIT_FILEPATH) {
    return null;
  }
  if (
    process.env.NODE_ENV !== 'production' ||
    process.env.DOCUSAURUS_DISABLE_LAST_UPDATE === 'true'
  ) {
    // Use fake data in dev/test for faster development.
    return LAST_UPDATE_FALLBACK;
  }
  return getGitLastUpdate(filePath);
}
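
// A hedged sketch of the dev-time behavior above: outside production builds,
// or with DOCUSAURUS_DISABLE_LAST_UPDATE=true, getLastUpdate short-circuits to
// LAST_UPDATE_FALLBACK and never shells out to git ('docs/intro.md' below is a
// placeholder path):
//
//   process.env.NODE_ENV = 'development';
//   const update = await getLastUpdate('docs/intro.md');
//   // update?.lastUpdatedAt === LAST_UPDATE_FALLBACK.lastUpdatedAt (1539502055000)
//   // update?.lastUpdatedBy === 'Author'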

type LastUpdateOptions = Pick<
  PluginOptions,
  'showLastUpdateAuthor' | 'showLastUpdateTime'
@@ -39,21 +109,11 @@ export type FrontMatterLastUpdate = {
  date?: Date | string;
};

// TODO Docusaurus v4: refactor/rename, make it clear this fn is only
// for Markdown files with front matter shared by content plugins
export async function readLastUpdateData(
  filePath: string,
  options: LastUpdateOptions,
  lastUpdateFrontMatter: FrontMatterLastUpdate | undefined,
  vcsParam: Pick<VcsConfig, 'getFileLastUpdateInfo'>,
): Promise<LastUpdateData> {
  // We fall back to the default VCS config at runtime on purpose
  // It preserves retro-compatibility if a third-party plugin imports it
  // This also ensures unit tests keep working without extra setup
  // We still want to ensure type safety by requiring the VCS param
  // TODO Docusaurus v4: refactor all these Git read APIs
  const vcs = vcsParam ?? getVcsPreset('default-v1');

  const {showLastUpdateAuthor, showLastUpdateTime} = options;

  if (!showLastUpdateAuthor && !showLastUpdateTime) {
@@ -68,16 +128,14 @@ export async function readLastUpdateData(
  // We try to minimize git last update calls
  // We call it at most once
  // If all the data is provided as front matter, we do not call it
  const getLastUpdateMemoized = _.memoize(() =>
    vcs.getFileLastUpdateInfo(filePath),
  );
  const getLastUpdateMemoized = _.memoize(() => getLastUpdate(filePath));
  const getLastUpdateBy = () =>
    getLastUpdateMemoized().then((update) => {
      // Important, see https://github.com/facebook/docusaurus/pull/11211
      if (update === null) {
        return null;
      }
      return update?.author;
      return update?.lastUpdatedBy;
    });
  const getLastUpdateAt = () =>
    getLastUpdateMemoized().then((update) => {
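
// A minimal sketch of the "call the VCS at most once" pattern used above,
// assuming lodash's memoize; `fetchChangeInfoSketch` is a hypothetical
// stand-in for the memoized git/VCS read.
declare function fetchChangeInfoSketch(): Promise<LastUpdateData | null>;

const fetchOnceSketch = _.memoize(fetchChangeInfoSketch);

// Both getters share one underlying promise, so the expensive VCS call runs
// at most once per file, and a null result (untracked file) stays null.
const getBySketch = () =>
  fetchOnceSketch().then((u) => (u === null ? null : u.lastUpdatedBy));
const getAtSketch = () =>
  fetchOnceSketch().then((u) => (u === null ? null : u.lastUpdatedAt));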
@@ -85,7 +143,7 @@
      if (update === null) {
        return null;
      }
      return update?.timestamp;
      return update?.lastUpdatedAt;
    });

  const lastUpdatedBy = showLastUpdateAuthor
@@ -1 +0,0 @@
A site fixture with files versioned on Git.

@@ -1 +0,0 @@
Blog 2

@@ -1 +0,0 @@
Blog 1

@@ -1,3 +0,0 @@
This is a partial in file/folder starting with _:

It should be excluded by default

@@ -1 +0,0 @@
Doc with space in name

@@ -1 +0,0 @@
Doc 1

@@ -1 +0,0 @@
Doc 2
@@ -1,723 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import fs from 'fs-extra';
import path from 'path';
import os from 'os';
import execa from 'execa';

import {
  FileNotTrackedError,
  getFileCommitDate,
  getGitLastUpdate,
  getGitCreation,
  getGitRepoRoot,
  getGitSuperProjectRoot,
  getGitSubmodulePaths,
  getGitAllRepoRoots,
  getGitRepositoryFilesInfo,
} from '../gitUtils';

class Git {
  private constructor(private dir: string) {
    this.dir = dir;
  }

  private static async runOptimisticGitCommand({
    cwd,
    cmd,
    args,
    options,
  }: {
    cwd: string;
    args: string[];
    cmd: string;
    options?: execa.Options;
  }): Promise<execa.ExecaReturnValue> {
    const res = await execa(cmd, args, {
      cwd,
      silent: true,
      shell: true,
      ...options,
    });
    if (res.exitCode !== 0) {
      throw new Error(
        `Git command failed with code ${res.exitCode}: ${cmd} ${args.join(
          ' ',
        )}`,
      );
    }
    return res;
  }

  static async initializeRepo(dir: string): Promise<Git> {
    await Git.runOptimisticGitCommand({
      cmd: 'git',
      args: ['init'],
      cwd: dir,
    });
    await Git.runOptimisticGitCommand({
      cmd: 'git',
      args: ['config', 'user.email', '"test@example.com"'],
      cwd: dir,
    });
    await Git.runOptimisticGitCommand({
      cmd: 'git',
      args: ['config', 'user.name', '"Test"'],
      cwd: dir,
    });
    await Git.runOptimisticGitCommand({
      cmd: 'git',
      args: ['commit', '--allow-empty', '-m "First commit"'],
      cwd: dir,
    });
    return new Git(dir);
  }

  async runOptimisticGitCommand(
    cmd: string,
    args?: string[],
    options?: execa.Options,
  ): Promise<execa.ExecaReturnValue> {
    return Git.runOptimisticGitCommand({cwd: this.dir, cmd, args, options});
  }

  async add(filePath: string): Promise<void> {
    await this.runOptimisticGitCommand('git', ['add', filePath]);
  }
  async addAll(): Promise<void> {
    await this.runOptimisticGitCommand('git', ['add', '.']);
  }

  async commit(msg: string, date: string, author: string): Promise<void> {
    await this.runOptimisticGitCommand(
      `git`,
      [
        'commit',
        `-m "${msg}"`,
        `--date "${date}T00:00:00Z"`,
        `--author "${author}"`,
      ],
      {env: {GIT_COMMITTER_DATE: `${date}T00:00:00Z`}},
    );
  }

  async commitFile(
    filePath: string,
    {
      fileContent,
      commitMessage,
      commitDate,
      commitAuthor,
    }: {
      fileContent?: string;
      commitMessage?: string;
      commitDate?: string;
      commitAuthor?: string;
    } = {},
  ): Promise<void> {
    await fs.ensureDir(path.join(this.dir, path.dirname(filePath)));
    await fs.writeFile(
      path.join(this.dir, filePath),
      fileContent ?? `Content of ${filePath}`,
    );
    await this.add(filePath);
    await this.commit(
      commitMessage ?? `Create ${filePath}`,
      commitDate ?? '2020-06-19',
      commitAuthor ?? 'Seb <seb@example.com>',
    );
  }

  async addSubmodule(name: string, repoPath: string): Promise<void> {
    return this.runOptimisticGitCommand('git', [
      '-c protocol.file.allow=always',
      'submodule',
      'add',
      repoPath,
      name,
    ]);
  }

  async defineSubmodules(submodules: {[name: string]: string}): Promise<void> {
    for (const entry of Object.entries(submodules)) {
      await this.addSubmodule(entry[0], entry[1]);
    }
    await this.runOptimisticGitCommand('git', [
      'submodule',
      'update',
      '--init',
      '--recursive',
    ]);
  }
}

async function createGitRepoEmpty(): Promise<{repoDir: string; git: Git}> {
  let repoDir = await fs.mkdtemp(path.join(os.tmpdir(), 'git-test-repo'));
  repoDir = await fs.realpath.native(repoDir);
  const git = await Git.initializeRepo(repoDir);
  return {repoDir, git};
}

describe('commit info APIs', () => {
  async function createGitRepoTestFixture() {
    const {repoDir, git} = await createGitRepoEmpty();

    await git.commitFile('test.txt', {
      fileContent: 'Some content',
      commitMessage: 'Create test.txt',
      commitDate: '2020-06-19',
      commitAuthor: 'Caroline <caroline@example.com>',
    });

    await git.commitFile('test.txt', {
      fileContent: 'Updated content',
      commitMessage: 'Update test.txt',
      commitDate: '2020-06-20',
      commitAuthor: 'Josh-Cena <josh-cena@example.com>',
    });

    await fs.writeFile(path.join(repoDir, 'test.txt'), 'Updated content (2)');
    await fs.writeFile(path.join(repoDir, 'moved.txt'), 'This file is moved');
    await git.addAll();
    await git.commit(
      'Update test.txt again, create moved.txt',
      '2020-09-13',
      'Robert <robert@example.com>',
    );

    await fs.move(
      path.join(repoDir, 'moved.txt'),
      path.join(repoDir, 'dest.txt'),
    );
    await git.addAll();
    await git.commit(
      'Rename moved.txt to dest.txt',
      '2020-11-13',
      'Seb <seb@example.com>',
    );

    await fs.writeFile(path.join(repoDir, 'untracked.txt'), "I'm untracked");

    return repoDir;
  }

  // Create the repo only once for all tests => faster tests
  const repoDirPromise = createGitRepoTestFixture();

  describe('getFileCommitDate', () => {
    it('returns earliest commit date with author', async () => {
      const repoDir = await repoDirPromise;

      await expect(
        getFileCommitDate(path.join(repoDir, 'test.txt'), {
          age: 'oldest',
          includeAuthor: true,
        }),
      ).resolves.toEqual({
        date: new Date('2020-06-19'),
        timestamp: new Date('2020-06-19').getTime(),
        author: 'Caroline',
      });
      await expect(
        getFileCommitDate(path.join(repoDir, 'dest.txt'), {
          age: 'oldest',
          includeAuthor: true,
        }),
      ).resolves.toEqual({
        date: new Date('2020-09-13'),
        timestamp: new Date('2020-09-13').getTime(),
        author: 'Robert',
      });
    });

    it('returns latest commit date with author', async () => {
      const repoDir = await repoDirPromise;

      await expect(
        getFileCommitDate(path.join(repoDir, 'test.txt'), {
          age: 'newest',
          includeAuthor: true,
        }),
      ).resolves.toEqual({
        date: new Date('2020-09-13'),
        timestamp: new Date('2020-09-13').getTime(),
        author: 'Robert',
      });
      await expect(
        getFileCommitDate(path.join(repoDir, 'dest.txt'), {
          age: 'newest',
          includeAuthor: true,
        }),
      ).resolves.toEqual({
        date: new Date('2020-11-13'),
        timestamp: new Date('2020-11-13').getTime(),
        author: 'Seb',
      });
    });

    it('throws custom error when file is not tracked', async () => {
      const repoDir = await repoDirPromise;

      await expect(() =>
        getFileCommitDate(path.join(repoDir, 'untracked.txt'), {
          age: 'newest',
          includeAuthor: true,
        }),
      ).rejects.toThrow(FileNotTrackedError);
    });

    it('throws when file not found', async () => {
      const repoDir = await createGitRepoTestFixture();

      await expect(() =>
        getFileCommitDate(path.join(repoDir, 'nonexistent.txt'), {
          age: 'newest',
          includeAuthor: true,
        }),
      ).rejects.toThrow(
        /Failed to retrieve git history for ".*nonexistent.txt" because the file does not exist./,
      );
    });
  });

  describe('commit info APIs', () => {
    it('returns creation info for test.txt', async () => {
      const repoDir = await repoDirPromise;

      const filePath = path.join(repoDir, 'test.txt');
      await expect(getGitCreation(filePath)).resolves.toEqual({
        author: 'Caroline',
        timestamp: new Date('2020-06-19').getTime(),
      });

      await expect(getGitLastUpdate(filePath)).resolves.toEqual({
        author: 'Robert',
        timestamp: new Date('2020-09-13').getTime(),
      });
    });

    it('returns creation info for dest.txt', async () => {
      const repoDir = await repoDirPromise;

      const filePath = path.join(repoDir, 'dest.txt');
      await expect(getGitCreation(filePath)).resolves.toEqual({
        author: 'Robert',
        timestamp: new Date('2020-09-13').getTime(),
      });
      await expect(getGitLastUpdate(filePath)).resolves.toEqual({
        author: 'Seb',
        timestamp: new Date('2020-11-13').getTime(),
      });
    });

    it('returns creation info for untracked.txt', async () => {
      const repoDir = await repoDirPromise;

      const filePath = path.join(repoDir, 'untracked.txt');
      await expect(getGitCreation(filePath)).resolves.toEqual(null);
      await expect(getGitLastUpdate(filePath)).resolves.toEqual(null);
    });

    it('returns creation info for non-existing.txt', async () => {
      const repoDir = await repoDirPromise;

      const filePath = path.join(repoDir, 'non-existing.txt');
      await expect(
        getGitCreation(filePath),
      ).rejects.toThrowErrorMatchingInlineSnapshot(
        `"An error occurred when trying to get the last update date"`,
      );
      await expect(
        getGitLastUpdate(filePath),
      ).rejects.toThrowErrorMatchingInlineSnapshot(
        `"An error occurred when trying to get the last update date"`,
      );
    });

    it('returns files info', async () => {
      const repoDir = await repoDirPromise;

      await expect(getGitRepositoryFilesInfo(repoDir)).resolves
        .toMatchInlineSnapshot(`
        Map {
          "dest.txt" => {
            "creation": {
              "author": "Seb",
              "timestamp": 1605225600000,
            },
            "lastUpdate": {
              "author": "Seb",
              "timestamp": 1605225600000,
            },
          },
          "moved.txt" => {
            "creation": {
              "author": "Robert",
              "timestamp": 1599955200000,
            },
            "lastUpdate": {
              "author": "Robert",
              "timestamp": 1599955200000,
            },
          },
          "test.txt" => {
            "creation": {
              "author": "Caroline",
              "timestamp": 1592524800000,
            },
            "lastUpdate": {
              "author": "Robert",
              "timestamp": 1599955200000,
            },
          },
        }
      `);
    });
  });
});

describe('getGitRepoRoot', () => {
  async function initTestRepo() {
    const {repoDir, git} = await createGitRepoEmpty();
    await git.commitFile('subDir/test.txt');
    return repoDir;
  }

  // Create the repo only once for all tests => faster tests
  const repoDirPromise = initTestRepo();

  it('returns repoDir for cwd=repoDir', async () => {
    const repoDir = await repoDirPromise;
    const cwd = repoDir;
    await expect(getGitRepoRoot(cwd)).resolves.toEqual(repoDir);
  });

  it('returns repoDir for cwd=repoDir/subDir', async () => {
    const repoDir = await repoDirPromise;
    const cwd = path.join(repoDir, 'subDir');
    await expect(getGitRepoRoot(cwd)).resolves.toEqual(repoDir);
  });

  it('returns Docusaurus repo for cwd=__dirname', async () => {
    const cwd = __dirname;
    await expect(getGitRepoRoot(cwd)).resolves.toMatch(/docusaurus$/);
  });

  it('rejects for cwd=repoDir/doesNotExist', async () => {
    const repoDir = await repoDirPromise;
    const cwd = path.join(repoDir, 'doesNotExist');
    await expect(getGitRepoRoot(cwd)).rejects.toThrow(
      /Couldn't find the git repository root directory/,
    );
  });
});

describe('submodules APIs', () => {
  async function initTestRepo() {
    const superproject = await createGitRepoEmpty();
    await superproject.git.commitFile('README.md');
    await superproject.git.commitFile('website/docs/myDoc.md');

    const submodule1 = await createGitRepoEmpty();
    await submodule1.git.commitFile('file1.txt');

    const submodule2 = await createGitRepoEmpty();
    await submodule2.git.commitFile('subDir/file2.txt');

    await superproject.git.defineSubmodules({
      'submodules/submodule1': submodule1.repoDir,
      'submodules/submodule2': submodule2.repoDir,
    });

    return {superproject, submodule1, submodule2};
  }

  // Create the repo only once for all tests => faster tests
  const repoPromise = initTestRepo();

  describe('getGitSuperProjectRoot', () => {
    it('returns superproject dir for cwd=superproject', async () => {
      const repo = await repoPromise;
      const cwd = path.join(repo.superproject.repoDir);
      await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
        repo.superproject.repoDir,
      );
    });

    it('returns superproject dir for cwd=superproject/submodules', async () => {
      const repo = await repoPromise;
      const cwd = path.join(repo.superproject.repoDir, 'submodules');
      await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
        repo.superproject.repoDir,
      );
    });

    it('returns superproject dir for cwd=superproject/website/docs', async () => {
      const repo = await repoPromise;
      const cwd = path.join(repo.superproject.repoDir, 'website/docs');
      await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
        repo.superproject.repoDir,
      );
    });

    it('returns superproject dir for cwd=submodule1', async () => {
      const repo = await repoPromise;
      const cwd = path.join(repo.superproject.repoDir, 'submodules/submodule1');
      await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
        repo.superproject.repoDir,
      );
    });

    it('returns superproject dir for cwd=submodule2', async () => {
      const repo = await initTestRepo();
      const cwd = path.join(repo.superproject.repoDir, 'submodules/submodule2');
      await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
        repo.superproject.repoDir,
      );
    });

    it('returns superproject dir for cwd=submodule2/subDir', async () => {
      const repo = await repoPromise;
      const cwd = path.join(
        repo.superproject.repoDir,
        'submodules/submodule2/subDir',
      );
      await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
        repo.superproject.repoDir,
      );
    });

    it('rejects for cwd of untracked dir', async () => {
      const cwd = await os.tmpdir();
      // Do we really want this to throw?
      // Not sure, and Git doesn't help us failsafe and return null...
      await expect(getGitSuperProjectRoot(cwd)).rejects
        .toThrowErrorMatchingInlineSnapshot(`
        "Couldn't find the git superproject root directory
        Failure while running \`git rev-parse --show-superproject-working-tree\` from cwd="<TEMP_DIR>"
        The command executed throws an error: Command failed with exit code 128: git rev-parse --show-superproject-working-tree
        fatal: not a git repository (or any of the parent directories): .git"
      `);
    });
  });

  describe('getGitSubmodulePaths', () => {
    it('returns submodules for cwd=superproject', async () => {
      const repo = await repoPromise;
      const cwd = path.join(repo.superproject.repoDir);
      await expect(getGitSubmodulePaths(cwd)).resolves.toEqual([
        'submodules/submodule1',
        'submodules/submodule2',
      ]);
    });

    it('returns submodules for cwd=superproject/website/docs', async () => {
      const repo = await repoPromise;
      const cwd = path.join(repo.superproject.repoDir, 'website', 'docs');
      await expect(getGitSubmodulePaths(cwd)).resolves.toEqual([
        // The returned paths are relative to CWD,
        // Not sure if it's the best behavior.
        // But you'd rather call this with the superproject root as CWD anyway!
        '../../submodules/submodule1',
        '../../submodules/submodule2',
      ]);
    });

    it('returns [] for cwd=submodules/submodule1', async () => {
      const repo = await repoPromise;
      const cwd = path.join(
        repo.superproject.repoDir,
        'submodules',
        'submodule1',
      );
      await expect(getGitSubmodulePaths(cwd)).resolves.toEqual([]);
    });

    it('returns [] for cwd=submodules/submodule2/subDir', async () => {
      const repo = await repoPromise;
      const cwd = path.join(
        repo.superproject.repoDir,
        'submodules',
        'submodule2',
        'subDir',
      );
      await expect(getGitSubmodulePaths(cwd)).resolves.toEqual([]);
    });

    it('rejects for cwd=doesNotExist', async () => {
      const repo = await repoPromise;
      const cwd = path.join(repo.superproject.repoDir, 'doesNotExist');
      await expect(getGitSubmodulePaths(cwd)).rejects.toThrow(
        /Couldn't read the list of git submodules/,
      );
    });

    it('rejects for cwd=notTracked', async () => {
      const cwd = await os.tmpdir();
      await expect(getGitSubmodulePaths(cwd)).rejects.toThrow(
        /Couldn't read the list of git submodules/,
      );
    });
  });

  describe('getGitAllRepoRoots', () => {
    it('returns root paths for cwd=superproject', async () => {
      const repo = await repoPromise;
      const cwd = path.join(repo.superproject.repoDir);
      await expect(getGitAllRepoRoots(cwd)).resolves.toEqual([
        repo.superproject.repoDir,
        path.join(repo.superproject.repoDir, 'submodules', 'submodule1'),
        path.join(repo.superproject.repoDir, 'submodules', 'submodule2'),
      ]);
    });

    it('returns root paths for cwd=superproject/website/docs', async () => {
      const repo = await repoPromise;
      const cwd = path.join(repo.superproject.repoDir, 'website', 'docs');
      await expect(getGitAllRepoRoots(cwd)).resolves.toEqual([
        repo.superproject.repoDir,
        path.join(repo.superproject.repoDir, 'submodules', 'submodule1'),
        path.join(repo.superproject.repoDir, 'submodules', 'submodule2'),
      ]);
    });

    it('returns root paths for cwd=superproject/submodules', async () => {
      const repo = await repoPromise;
      const cwd = path.join(repo.superproject.repoDir, 'submodules');
      await expect(getGitAllRepoRoots(cwd)).resolves.toEqual([
        repo.superproject.repoDir,
        path.join(repo.superproject.repoDir, 'submodules', 'submodule1'),
        path.join(repo.superproject.repoDir, 'submodules', 'submodule2'),
      ]);
    });

    it('returns root paths for cwd=superproject/submodules/submodule1', async () => {
      const repo = await repoPromise;
      const cwd = path.join(
        repo.superproject.repoDir,
        'submodules',
        'submodule1',
      );
      await expect(getGitAllRepoRoots(cwd)).resolves.toEqual([
        repo.superproject.repoDir,
        path.join(repo.superproject.repoDir, 'submodules', 'submodule1'),
        path.join(repo.superproject.repoDir, 'submodules', 'submodule2'),
      ]);
    });

    it('returns root paths for cwd=superproject/submodules/submodule2/subDir', async () => {
      const repo = await repoPromise;
      const cwd = path.join(
        repo.superproject.repoDir,
        'submodules',
        'submodule2',
        'subDir',
      );
      await expect(getGitAllRepoRoots(cwd)).resolves.toEqual([
        repo.superproject.repoDir,
        path.join(repo.superproject.repoDir, 'submodules', 'submodule1'),
        path.join(repo.superproject.repoDir, 'submodules', 'submodule2'),
      ]);
    });

    it('rejects for cwd=doesNotExist', async () => {
      const repo = await repoPromise;
      const cwd = path.join(repo.superproject.repoDir, 'doesNotExist');
      await expect(getGitAllRepoRoots(cwd)).rejects.toThrow(
        /Could not get all the git repository root paths/,
      );
    });

    it('rejects for cwd=notTracked', async () => {
      const cwd = await os.tmpdir();
      await expect(getGitAllRepoRoots(cwd)).rejects.toThrow(
        /Could not get all the git repository root paths/,
      );
    });
  });

  describe('getGitRepositoryFilesInfo', () => {
    it('for superproject', async () => {
      const repo = await repoPromise;
      const cwd = path.join(repo.superproject.repoDir);
      await expect(getGitRepositoryFilesInfo(cwd)).resolves
        .toMatchInlineSnapshot(`
        Map {
          "website/docs/myDoc.md" => {
            "creation": {
              "author": "Seb",
              "timestamp": 1592524800000,
            },
            "lastUpdate": {
              "author": "Seb",
              "timestamp": 1592524800000,
            },
          },
          "README.md" => {
            "creation": {
              "author": "Seb",
              "timestamp": 1592524800000,
            },
            "lastUpdate": {
              "author": "Seb",
              "timestamp": 1592524800000,
            },
          },
        }
      `);
    });

    it('for submodule1', async () => {
      const repo = await repoPromise;
      const cwd = path.join(
        repo.superproject.repoDir,
        'submodules',
        'submodule1',
      );
      await expect(getGitRepositoryFilesInfo(cwd)).resolves
        .toMatchInlineSnapshot(`
        Map {
          "file1.txt" => {
            "creation": {
              "author": "Seb",
              "timestamp": 1592524800000,
            },
            "lastUpdate": {
              "author": "Seb",
              "timestamp": 1592524800000,
            },
          },
        }
      `);
    });

    it('for submodule2', async () => {
      const repo = await repoPromise;
      const cwd = path.join(
        repo.superproject.repoDir,
        'submodules',
        'submodule2',
      );
      await expect(getGitRepositoryFilesInfo(cwd)).resolves
        .toMatchInlineSnapshot(`
        Map {
          "subDir/file2.txt" => {
            "creation": {
              "author": "Seb",
              "timestamp": 1592524800000,
            },
            "lastUpdate": {
              "author": "Seb",
              "timestamp": 1592524800000,
            },
          },
        }
      `);
    });
  });
});
@ -1,524 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import path from 'path';
import fs from 'fs-extra';
import os from 'os';
import _ from 'lodash';
import execa from 'execa';
import PQueue from 'p-queue';
import logger from '@docusaurus/logger';

// Quite high/conservative concurrency value (it was previously "Infinity")
// See https://github.com/facebook/docusaurus/pull/10915
const DefaultGitCommandConcurrency =
  // TODO Docusaurus v4: bump node, availableParallelism() now always exists
  (typeof os.availableParallelism === 'function'
    ? os.availableParallelism()
    : os.cpus().length) * 4;
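// e.g. on a machine with 8 logical CPUs, this allows up to 32 concurrent
// git processes by default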

const GitCommandConcurrencyEnv = process.env.DOCUSAURUS_GIT_COMMAND_CONCURRENCY
  ? parseInt(process.env.DOCUSAURUS_GIT_COMMAND_CONCURRENCY, 10)
  : undefined;

const GitCommandConcurrency =
  GitCommandConcurrencyEnv && GitCommandConcurrencyEnv > 0
    ? GitCommandConcurrencyEnv
    : DefaultGitCommandConcurrency;

// We use a queue to avoid running too many concurrent Git commands at once
// See https://github.com/facebook/docusaurus/issues/10348
const GitCommandQueue = new PQueue({
  concurrency: GitCommandConcurrency,
});

const realHasGitFn = () => {
  try {
    return execa.sync('git', ['--version']).exitCode === 0;
  } catch (error) {
    return false;
  }
};

// The hasGit call is synchronous IO so we memoize it
// The user won't install Git in the middle of a build anyway...
const hasGit =
  process.env.NODE_ENV === 'test' ? realHasGitFn : _.memoize(realHasGitFn);

// TODO Docusaurus v4: remove this
// Exceptions should not be used for control flow logic
/** Custom error thrown when git is not found in `PATH`. */
export class GitNotFoundError extends Error {}

// TODO Docusaurus v4: remove this, only kept for retro-compatibility
// Exceptions should not be used for control flow logic
/** Custom error thrown when the current file is not tracked by git. */
export class FileNotTrackedError extends Error {}

/**
 * Fetches the git history of a file and returns a relevant commit date.
 * It gets the commit date instead of author date so that amended commits
 * can have their dates updated.
 *
 * @throws {@link GitNotFoundError} If git is not found in `PATH`.
 * @throws {@link FileNotTrackedError} If the current file is not tracked by git.
 * @throws Also throws when `git log` exited with non-zero, or when it outputs
 * unexpected text.
 */
export async function getFileCommitDate(
  /** Absolute path to the file. */
  file: string,
  args: {
    /**
     * `"oldest"` is the commit that added the file, following renames;
     * `"newest"` is the last commit that edited the file.
     */
    age?: 'oldest' | 'newest';
    /** Use `includeAuthor: true` to get the author information as well. */
    includeAuthor?: false;
  },
): Promise<{
  /** Relevant commit date. */
  date: Date; // TODO duplicate data, not really useful?
  /** Timestamp returned from git, converted to **milliseconds**. */
  timestamp: number;
}>;
/**
 * Fetches the git history of a file and returns a relevant commit date.
 * It gets the commit date instead of author date so that amended commits
 * can have their dates updated.
 *
 * @throws {@link GitNotFoundError} If git is not found in `PATH`.
 * @throws {@link FileNotTrackedError} If the current file is not tracked by git.
 * @throws Also throws when `git log` exited with non-zero, or when it outputs
 * unexpected text.
 */
export async function getFileCommitDate(
  /** Absolute path to the file. */
  file: string,
  args: {
    /**
     * `"oldest"` is the commit that added the file, following renames;
     * `"newest"` is the last commit that edited the file.
     */
    age?: 'oldest' | 'newest';
    includeAuthor: true;
  },
): Promise<{
  /** Relevant commit date. */
  date: Date;
  /** Timestamp returned from git, converted to **milliseconds**. */
  timestamp: number;
  /** The author's name, as returned from git. */
  author: string;
}>;

export async function getFileCommitDate(
  file: string,
  {
    age = 'oldest',
    includeAuthor = false,
  }: {
    age?: 'oldest' | 'newest';
    includeAuthor?: boolean;
  },
): Promise<{
  date: Date;
  timestamp: number;
  author?: string;
}> {
  if (!hasGit()) {
    throw new GitNotFoundError(
      `Failed to retrieve git history for "${file}" because git is not installed.`,
    );
  }

  if (!(await fs.pathExists(file))) {
    throw new Error(
      `Failed to retrieve git history for "${file}" because the file does not exist.`,
    );
  }

  // We add a "RESULT:" prefix to make parsing easier
  // See why: https://github.com/facebook/docusaurus/pull/10022
  const resultFormat = includeAuthor ? 'RESULT:%ct,%an' : 'RESULT:%ct';

  const args = [
    `--format=${resultFormat}`,
    '--max-count=1',
    age === 'oldest' ? '--follow --diff-filter=A' : undefined,
  ]
    .filter(Boolean)
    .join(' ');

  // Do not include GPG signature in the log output
  // See https://github.com/facebook/docusaurus/pull/10022
  const command = `git -c log.showSignature=false log ${args} -- "${path.basename(
    file,
  )}"`;

  const result = (await GitCommandQueue.add(() => {
    return execa(command, {
      cwd: path.dirname(file),
      shell: true,
    });
  }))!;

  if (result.exitCode !== 0) {
    throw new Error(
      `Failed to retrieve the git history for file "${file}" with exit code ${result.exitCode}: ${result.stderr}`,
    );
  }

  // We only parse the output line starting with our "RESULT:" prefix
  // See why https://github.com/facebook/docusaurus/pull/10022
  const regex = includeAuthor
    ? /(?:^|\n)RESULT:(?<timestamp>\d+),(?<author>.+)(?:$|\n)/
    : /(?:^|\n)RESULT:(?<timestamp>\d+)(?:$|\n)/;
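  // e.g. a matching output line looks like "RESULT:1592524800,Seb"
  // (timestamp in seconds since epoch, then the author name)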

  const output = result.stdout.trim();

  if (!output) {
    throw new FileNotTrackedError(
      `Failed to retrieve the git history for file "${file}" because the file is not tracked by git.`,
    );
  }

  const match = output.match(regex);

  if (!match) {
    throw new Error(
      `Failed to retrieve the git history for file "${file}" with unexpected output: ${output}`,
    );
  }

  const timestampInSeconds = Number(match.groups!.timestamp);
  const timestamp = timestampInSeconds * 1_000;
  const date = new Date(timestamp);

  if (includeAuthor) {
    return {date, timestamp, author: match.groups!.author!};
  }
  return {date, timestamp};
}

let showedGitRequirementError = false;
let showedFileNotTrackedError = false;

type GitCommitInfo = {timestamp: number; author: string};

async function getGitCommitInfo(
  filePath: string,
  age: 'oldest' | 'newest',
): Promise<GitCommitInfo | null> {
  if (!filePath) {
    return null;
  }
  // Wrap in try/catch in case the shell commands fail
  // (e.g. project doesn't use Git, etc.)
  try {
    const result = await getFileCommitDate(filePath, {
      age,
      includeAuthor: true,
    });
    return {timestamp: result.timestamp, author: result.author};
  } catch (err) {
    // TODO legacy perf issue: do not use exceptions for control flow!
    if (err instanceof GitNotFoundError) {
      if (!showedGitRequirementError) {
        logger.warn('Sorry, the last update options require Git.');
        showedGitRequirementError = true;
      }
    } else if (err instanceof FileNotTrackedError) {
      if (!showedFileNotTrackedError) {
        logger.warn(
          'Cannot infer the update date for some files, as they are not tracked by git.',
        );
        showedFileNotTrackedError = true;
      }
    } else {
      throw new Error(
        `An error occurred when trying to get the last update date`,
        {cause: err},
      );
    }
    return null;
  }
}

export async function getGitLastUpdate(
  filePath: string,
): Promise<GitCommitInfo | null> {
  return getGitCommitInfo(filePath, 'newest');
}

export async function getGitCreation(
  filePath: string,
): Promise<GitCommitInfo | null> {
  return getGitCommitInfo(filePath, 'oldest');
}

export async function getGitRepoRoot(cwd: string): Promise<string> {
  const createErrorMessageBase = () => {
    return `Couldn't find the git repository root directory
Failure while running ${logger.code(
      'git rev-parse --show-toplevel',
    )} from cwd=${logger.path(cwd)}`;
  };

  const result = await execa('git', ['rev-parse', '--show-toplevel'], {
    cwd,
  }).catch((error) => {
    // We enter this rejection when cwd is not a dir, for example
    throw new Error(
      `${createErrorMessageBase()}
The command executed threw an error: ${error.message}`,
      {cause: error},
    );
  });

  if (result.exitCode !== 0) {
    throw new Error(
      `${createErrorMessageBase()}
The command returned exit code ${logger.code(result.exitCode)}: ${logger.subdue(
        result.stderr,
      )}`,
    );
  }

  return fs.realpath.native(result.stdout.trim());
}

// A Git "superproject" is a Git repository that contains submodules
// See https://git-scm.com/docs/git-rev-parse#Documentation/git-rev-parse.txt---show-superproject-working-tree
// See https://git-scm.com/book/en/v2/Git-Tools-Submodules
export async function getGitSuperProjectRoot(
  cwd: string,
): Promise<string | null> {
  const createErrorMessageBase = () => {
    return `Couldn't find the git superproject root directory
Failure while running ${logger.code(
      'git rev-parse --show-superproject-working-tree',
    )} from cwd=${logger.path(cwd)}`;
  };

  const result = await execa(
    'git',
    ['rev-parse', '--show-superproject-working-tree'],
    {
      cwd,
    },
  ).catch((error) => {
    // We enter this rejection when cwd is not a dir, for example
    throw new Error(
      `${createErrorMessageBase()}
The command executed threw an error: ${error.message}`,
      {cause: error},
    );
  });

  if (result.exitCode !== 0) {
    throw new Error(
      `${createErrorMessageBase()}
The command returned exit code ${logger.code(result.exitCode)}: ${logger.subdue(
        result.stderr,
      )}`,
    );
  }

  const output = result.stdout.trim();
  // This command only prints a path when run from inside a submodule;
  // it outputs nothing when we are inside the main repo
  if (output) {
    return fs.realpath.native(output);
  }
  return getGitRepoRoot(cwd);
}

// See https://git-scm.com/book/en/v2/Git-Tools-Submodules
export async function getGitSubmodulePaths(cwd: string): Promise<string[]> {
  const createErrorMessageBase = () => {
    return `Couldn't read the list of git submodules
Failure while running ${logger.code(
      'git submodule status',
    )} from cwd=${logger.path(cwd)}`;
  };

  const result = await execa('git', ['submodule', 'status'], {
    cwd,
  }).catch((error) => {
    // We enter this rejection when cwd is not a dir, for example
    throw new Error(
      `${createErrorMessageBase()}
The command executed threw an error: ${error.message}`,
      {cause: error},
    );
  });

  if (result.exitCode !== 0) {
    throw new Error(
      `${createErrorMessageBase()}
The command returned exit code ${logger.code(result.exitCode)}: ${logger.subdue(
        result.stderr,
      )}`,
    );
  }

  const output = result.stdout.trim();

  if (!output) {
    return [];
  }

  /* The output may contain a space/-/+/U prefix, for example:
  1234567e3e35d1f5b submodules/foo (heads/main)
  -9ab1f1d3a2d77b0a4 submodules/bar (heads/dev)
  +f00ba42e1b3ddead submodules/baz (remotes/origin/main)
  Udeadbeefcafe1234 submodules/qux
  */
  const getSubmodulePath = async (line: string) => {
    const submodulePath = line.substring(1).split(' ')[1];
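    // e.g. "-9ab1f1d3a2d77b0a4 submodules/bar (heads/dev)" yields "submodules/bar"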
    if (!submodulePath) {
      throw new Error(`Failed to parse git submodule line: ${line}`);
    }
    return submodulePath;
  };

  return Promise.all(output.split('\n').map(getSubmodulePath));
}

// Find the root git repository alongside all its submodules, if any
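// e.g. ["/repo", "/repo/submodules/submodule1", "/repo/submodules/submodule2"]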
export async function getGitAllRepoRoots(cwd: string): Promise<string[]> {
  try {
    const superProjectRoot = await getGitSuperProjectRoot(cwd);
    if (!superProjectRoot) {
      return [];
    }
    let submodulePaths = await getGitSubmodulePaths(superProjectRoot);
    submodulePaths = await Promise.all(
      submodulePaths.map((submodulePath) =>
        fs.realpath.native(path.resolve(superProjectRoot, submodulePath)),
      ),
    );
    return [superProjectRoot, ...submodulePaths];
  } catch (error) {
    throw new Error(
      `Could not get all the git repository root paths (superproject + submodules) from cwd=${cwd}`,
      {cause: error},
    );
  }
}

// Useful information about a file tracked in a Git repository
export type GitFileInfo = {
  creation: GitCommitInfo;
  lastUpdate: GitCommitInfo;
};

// A map of all the files tracked in a Git repository
export type GitFileInfoMap = Map<string, GitFileInfo>;

// Logic inspired from Astro Starlight:
// See https://bsky.app/profile/bluwy.me/post/3lyihod6qos2a
// See https://github.com/withastro/starlight/blob/c417f1efd463be63b7230617d72b120caed098cd/packages/starlight/utils/git.ts#L58
export async function getGitRepositoryFilesInfo(
  cwd: string,
): Promise<GitFileInfoMap> {
  // git --no-pager -c log.showSignature=false log --format=t:%ct,a:%an --name-status
  const result = await execa(
    'git',
    [
      '--no-pager',
      // Do not include GPG signature in the log output
      // See https://github.com/facebook/docusaurus/pull/10022
      '-c',
      'log.showSignature=false',
      // The git command we want to run
      'log',
      // Format each history entry as t:<seconds since epoch>,a:<author name>
      '--format=t:%ct,a:%an',
      // In each entry include the name and status for each modified file
      '--name-status',

      // For creation info, should we use --follow --find-renames=100% ???
    ],
    {
      cwd,
      encoding: 'utf-8',
      // TODO use streaming to avoid a large buffer
      // See https://github.com/withastro/starlight/issues/3154
      maxBuffer: 20 * 1024 * 1024,
    },
  );

  if (result.exitCode !== 0) {
    throw new Error(
      `Docusaurus failed to run 'git log' to retrieve the tracked files' last update date/author.
The command exited with code ${result.exitCode}: ${result.stderr}`,
    );
  }

  const logLines = result.stdout.split('\n');
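
  // Example of the output being parsed (illustrative), one "t:" header line
  // per commit followed by the files touched by that commit:
  //   t:1592524800,a:Seb
  //
  //   A\twebsite/docs/myDoc.md
  //   M\tREADME.md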

  const now = Date.now();

  // TODO not fail-fast
  let runningDate = now;
  let runningAuthor = 'N/A';
  const runningMap: GitFileInfoMap = new Map();

  for (const logLine of logLines) {
    if (logLine.startsWith('t:')) {
      // t:<timestamp>,a:<author name>
      const [timestampStr, authorStr] = logLine.split(',') as [string, string];
      const timestamp = Number.parseInt(timestampStr.slice(2), 10) * 1000;
      const author = authorStr.slice(2);

      runningDate = timestamp;
      runningAuthor = author;
    }

    // TODO the code below doesn't handle delete/move/rename operations properly
    // it returns files that no longer exist in the repo (deleted/moved)

    // - Added files take the format `A\t<file>`
    // - Modified files take the format `M\t<file>`
    // - Deleted files take the format `D\t<file>`
    // - Renamed files take the format `R<count>\t<old>\t<new>`
    // - Copied files take the format `C<count>\t<old>\t<new>`
    // The name of the file as of the commit being processed is always
    // the last part of the log line.
    const tabSplit = logLine.lastIndexOf('\t');
    if (tabSplit === -1) {
      continue;
    }
    const relativeFile = logLine.slice(tabSplit + 1);

    const currentFileInfo = runningMap.get(relativeFile);

    const currentCreationTime = currentFileInfo?.creation.timestamp || now;
    const newCreationTime = Math.min(currentCreationTime, runningDate);
    const newCreation: GitCommitInfo =
      !currentFileInfo || newCreationTime !== currentCreationTime
        ? {timestamp: newCreationTime, author: runningAuthor}
        : currentFileInfo.creation;

    const currentLastUpdateTime = currentFileInfo?.lastUpdate.timestamp || 0;
    const newLastUpdateTime = Math.max(currentLastUpdateTime, runningDate);
    const newLastUpdate: GitCommitInfo =
      !currentFileInfo || newLastUpdateTime !== currentLastUpdateTime
        ? {timestamp: newLastUpdateTime, author: runningAuthor}
        : currentFileInfo.lastUpdate;

    runningMap.set(relativeFile, {
      creation: newCreation,
      lastUpdate: newLastUpdate,
    });
  }

  return runningMap;
}

@ -1,54 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import {
  VCS_HARDCODED_CREATION_INFO,
  VCS_HARDCODED_LAST_UPDATE_INFO,
  VCS_HARDCODED_UNTRACKED_FILE_PATH,
  VcsHardcoded,
} from './vcsHardcoded';
import {VcsGitAdHoc} from './vcsGitAdHoc';
import {VscGitEager} from './vcsGitEager';
import {VcsDisabled} from './vcsDisabled';
import {VcsDefaultV1} from './vcsDefaultV1';
import {VcsDefaultV2} from './vcsDefaultV2';
import type {VcsConfig, VcsPreset} from '@docusaurus/types';

const VcsPresets: Record<VcsPreset, VcsConfig> = {
  'git-ad-hoc': VcsGitAdHoc,
  'git-eager': VscGitEager,
  hardcoded: VcsHardcoded,
  disabled: VcsDisabled,

  'default-v1': VcsDefaultV1,
  'default-v2': VcsDefaultV2,
};

export const VcsPresetNames = Object.keys(VcsPresets) as VcsPreset[];

export function findVcsPreset(presetName: string): VcsConfig | undefined {
  return VcsPresets[presetName as VcsPreset];
}

export function getVcsPreset(presetName: VcsPreset): VcsConfig {
  const vcs = findVcsPreset(presetName);
  if (vcs) {
    return vcs;
  } else {
    throw new Error(`Unknown Docusaurus VCS preset name: ${presetName}`);
  }
}

// Convenient export for writing unit tests depending on VCS
export const TEST_VCS = {
  CREATION_INFO: VCS_HARDCODED_CREATION_INFO,
  LAST_UPDATE_INFO: VCS_HARDCODED_LAST_UPDATE_INFO,
  UNTRACKED_FILE_PATH: VCS_HARDCODED_UNTRACKED_FILE_PATH,
  ...VcsHardcoded,
};

@ -1,33 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import {VcsHardcoded} from './vcsHardcoded';
import {VcsGitAdHoc} from './vcsGitAdHoc';
import type {VcsConfig} from '@docusaurus/types';

function getDynamicStrategy(): VcsConfig {
  return process.env.NODE_ENV === 'development' ||
    process.env.NODE_ENV === 'test'
    ? VcsHardcoded
    : VcsGitAdHoc;
}

/**
 * This VCS implements the historical Git automatic strategy.
 * It is only enabled in production mode, using ad-hoc git log commands.
 */
export const VcsDefaultV1: VcsConfig = {
  initialize: (...params) => {
    return getDynamicStrategy().initialize(...params);
  },
  getFileCreationInfo: (...params) => {
    return getDynamicStrategy().getFileCreationInfo(...params);
  },
  getFileLastUpdateInfo: (...params) => {
    return getDynamicStrategy().getFileLastUpdateInfo(...params);
  },
};

@ -1,33 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import {VcsHardcoded} from './vcsHardcoded';
import {VscGitEager} from './vcsGitEager';
import type {VcsConfig} from '@docusaurus/types';

function getStrategy(): VcsConfig {
  return process.env.NODE_ENV === 'development' ||
    process.env.NODE_ENV === 'test'
    ? VcsHardcoded
    : VscGitEager;
}

/**
 * This VCS implements the new eager Git automatic strategy.
 * It is only enabled in production mode, reading the git repository eagerly.
 */
export const VcsDefaultV2: VcsConfig = {
  initialize: (...params) => {
    return getStrategy().initialize(...params);
  },
  getFileCreationInfo: (...params) => {
    return getStrategy().getFileCreationInfo(...params);
  },
  getFileLastUpdateInfo: (...params) => {
    return getStrategy().getFileLastUpdateInfo(...params);
  },
};

@ -1,25 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import type {VcsConfig} from '@docusaurus/types';

/**
 * This VCS implementation always returns null values
 */
export const VcsDisabled: VcsConfig = {
  initialize: () => {
    // Noop
  },

  getFileCreationInfo: async (_filePath) => {
    return null;
  },

  getFileLastUpdateInfo: async (_filePath) => {
    return null;
  },
};

@ -1,30 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import {getGitLastUpdate, getGitCreation} from './gitUtils';
import type {VcsConfig} from '@docusaurus/types';

/**
 * A VCS strategy to query Git information in an ad-hoc way.
 * This is the default/historical Docusaurus Git VCS implementation.
 * Unfortunately, it is a major bottleneck for large sites/repositories.
 *
 * See also https://github.com/facebook/docusaurus/issues/11208
 */
export const VcsGitAdHoc: VcsConfig = {
  initialize: () => {
    // Nothing to do here for the default/historical Git implementation
  },

  getFileCreationInfo: async (filePath: string) => {
    return getGitCreation(filePath);
  },

  getFileLastUpdateInfo: async (filePath: string) => {
    return getGitLastUpdate(filePath);
  },
};

@ -1,99 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import {resolve, basename} from 'node:path';
import logger, {PerfLogger} from '@docusaurus/logger';
import {getGitAllRepoRoots, getGitRepositoryFilesInfo} from './gitUtils';
import type {GitFileInfo, GitFileInfoMap} from './gitUtils';
import type {VcsConfig} from '@docusaurus/types';

// The Map keys should be absolute file paths, not relative Git paths
function resolveFileInfoMapPaths(
  repoRoot: string,
  filesInfo: GitFileInfoMap,
): GitFileInfoMap {
  function transformMapEntry(
    entry: [string, GitFileInfo],
  ): [string, GitFileInfo] {
    // We just resolve the Git paths that are relative to the repo root
    return [resolve(repoRoot, entry[0]), entry[1]];
  }

  return new Map(Array.from(filesInfo.entries()).map(transformMapEntry));
}

function mergeFileMaps(fileMaps: GitFileInfoMap[]): GitFileInfoMap {
  return new Map(fileMaps.flatMap((m) => [...m]));
}
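
// Note: the Map constructor keeps the last value for duplicate keys, so if two
// repos ever reported the same absolute file path, the later one would win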

async function loadAllGitFilesInfoMap(cwd: string): Promise<GitFileInfoMap> {
  const roots = await PerfLogger.async('Reading Git root dirs', () =>
    getGitAllRepoRoots(cwd),
  );

  const allMaps: GitFileInfoMap[] = await Promise.all(
    roots.map(async (root) => {
      const map = await PerfLogger.async(
        `Reading Git history for repo ${logger.path(basename(root))}`,
        () => getGitRepositoryFilesInfo(root),
      );
      return resolveFileInfoMapPaths(root, map);
    }),
  );

  return mergeFileMaps(allMaps);
}

function createGitVcsConfig(): VcsConfig {
  let filesMapPromise: Promise<GitFileInfoMap> | null = null;

  async function getGitFileInfo(filePath: string): Promise<GitFileInfo | null> {
    const filesMap = await filesMapPromise;
    return filesMap?.get(filePath) ?? null;
  }

  return {
    initialize: ({siteDir}) => {
      if (filesMapPromise) {
        // We only initialize this VCS once!
        // For i18n sites, this permits reading ahead of time for all locales
        // so that it only slows down the first locale
        // I assume this logic is fine, but we'll see if it causes trouble

        // Note: we could also only call "initialize()" once from the outside,
        // but maybe it could be useful for custom VCS implementations to be
        // able to initialize once per locale?
        PerfLogger.log(
          'Git Eager VCS strategy already initialized, skipping re-initialization',
        );
        return;
      }

      filesMapPromise = PerfLogger.async('Git Eager VCS init', () =>
        loadAllGitFilesInfoMap(siteDir),
      );
      filesMapPromise.catch((error) => {
        console.error(
          'Failed to initialize the Docusaurus Git Eager VCS strategy',
          error,
        );
      });
    },

    getFileCreationInfo: async (filePath: string) => {
      const fileInfo = await getGitFileInfo(filePath);
      return fileInfo?.creation ?? null;
    },

    getFileLastUpdateInfo: async (filePath: string) => {
      const fileInfo = await getGitFileInfo(filePath);
      return fileInfo?.lastUpdate ?? null;
    },
  };
}

export const VscGitEager: VcsConfig = createGitVcsConfig();

@ -1,45 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import type {VcsConfig, VcsChangeInfo} from '@docusaurus/types';

export const VCS_HARDCODED_CREATION_INFO: VcsChangeInfo = {
  timestamp: 1490997600000, // 1st Apr 2017
  author: 'Creator',
};

export const VCS_HARDCODED_LAST_UPDATE_INFO: VcsChangeInfo = {
  timestamp: 1539502055000, // 14th Oct 2018
  author: 'Author',
};

export const VCS_HARDCODED_UNTRACKED_FILE_PATH = `file/path/${Math.random()}.mdx`;
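// (a random path, so that presumably no real file ever matches it)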

/**
 * This VCS implementation always returns hardcoded values for testing purposes.
 * It is also useful in dev environments where VCS info is not important.
 * Reading information from the VCS can be slow and is not always necessary.
 */
export const VcsHardcoded: VcsConfig = {
  initialize: () => {
    // Noop
  },

  getFileCreationInfo: async (filePath: string) => {
    if (filePath === VCS_HARDCODED_UNTRACKED_FILE_PATH) {
      return null;
    }
    return VCS_HARDCODED_CREATION_INFO;
  },

  getFileLastUpdateInfo: async (filePath: string) => {
    if (filePath === VCS_HARDCODED_UNTRACKED_FILE_PATH) {
      return null;
    }
    return VCS_HARDCODED_LAST_UPDATE_INFO;
  },
};

@ -51,7 +51,7 @@
    "escape-html": "^1.0.3",
    "eta": "^2.2.0",
    "eval": "^0.1.8",
    "execa": "^5.1.1",
    "execa": "5.1.1",
    "fs-extra": "^11.1.1",
    "html-tags": "^3.3.1",
    "html-webpack-plugin": "^5.6.0",

@ -28,7 +28,7 @@ describe('isInternalUrl', () => {
    expect(isInternalUrl('https://foo.com')).toBeFalsy();
  });

  it('returns false for relative protocol links', () => {
  it('returns false for whatever protocol links', () => {
    expect(isInternalUrl('//foo.com')).toBeFalsy();
  });

@ -43,50 +43,4 @@ describe('isInternalUrl', () => {
  it('returns false for undefined links', () => {
    expect(isInternalUrl(undefined)).toBeFalsy();
  });

  describe('custom scheme links', () => {
    it('returns true for invalid protocol schemes', () => {
      expect(isInternalUrl('+customScheme://')).toBeTruthy();
      expect(isInternalUrl('+customScheme://whatever')).toBeTruthy();
      expect(isInternalUrl('+customScheme:whatever')).toBeTruthy();

      expect(isInternalUrl('.customScheme://')).toBeTruthy();
      expect(isInternalUrl('.customScheme://whatever')).toBeTruthy();
      expect(isInternalUrl('.customScheme:whatever')).toBeTruthy();

      expect(isInternalUrl('-customScheme://')).toBeTruthy();
      expect(isInternalUrl('-customScheme://whatever')).toBeTruthy();
      expect(isInternalUrl('-customScheme:whatever')).toBeTruthy();

      expect(isInternalUrl('custom_scheme://')).toBeTruthy();
      expect(isInternalUrl('custom_scheme://whatever')).toBeTruthy();
      expect(isInternalUrl('custom_scheme:whatever')).toBeTruthy();

      expect(isInternalUrl('custom scheme://')).toBeTruthy();
      expect(isInternalUrl('custom scheme://whatever')).toBeTruthy();
      expect(isInternalUrl('custom scheme:whatever')).toBeTruthy();

      expect(isInternalUrl('custom$scheme://')).toBeTruthy();
      expect(isInternalUrl('custom$scheme://whatever')).toBeTruthy();
      expect(isInternalUrl('custom$scheme:whatever')).toBeTruthy();
    });

    it('returns false for valid protocol schemes', () => {
      expect(isInternalUrl('customScheme://')).toBeFalsy();
      expect(isInternalUrl('customScheme://whatever')).toBeFalsy();
      expect(isInternalUrl('customScheme:whatever')).toBeFalsy();

      expect(isInternalUrl('custom-scheme://')).toBeFalsy();
      expect(isInternalUrl('custom-scheme://whatever')).toBeFalsy();
      expect(isInternalUrl('custom-scheme:whatever')).toBeFalsy();

      expect(isInternalUrl('custom.scheme://')).toBeFalsy();
      expect(isInternalUrl('custom.scheme://whatever')).toBeFalsy();
      expect(isInternalUrl('custom.scheme:whatever')).toBeFalsy();

      expect(isInternalUrl('custom-sch.eme+-.://')).toBeFalsy();
      expect(isInternalUrl('custom-sch.eme+-.://whatever')).toBeFalsy();
      expect(isInternalUrl('custom-sch.eme+-.:whatever')).toBeFalsy();
    });
  });
});

@ -5,11 +5,8 @@
 * LICENSE file in the root directory of this source tree.
 */

// Poor man's protocol detection
// Spec: https://datatracker.ietf.org/doc/html/rfc3986#section-3.1
// In particular: scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
export function hasProtocol(url: string): boolean {
  return /^(?:[A-Za-z][A-Za-z\d+.-]*:|\/\/)/.test(url);
  return /^(?:\w*:|\/\/)/.test(url);
}
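// e.g. hasProtocol('https://foo.com') === true
//      hasProtocol('//foo.com') === true
//      hasProtocol('/docs/intro') === false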

export default function isInternalUrl(url?: string): boolean {

@ -8,10 +8,10 @@
import fs from 'fs-extra';
import logger, {PerfLogger} from '@docusaurus/logger';
import {mapAsyncSequential} from '@docusaurus/utils';
import {type LoadContextParams} from '../../server/site';
import {getLocaleList} from '../../server/i18n';
import {loadContext, type LoadContextParams} from '../../server/site';
import {loadI18n} from '../../server/i18n';
import {buildLocale, type BuildLocaleParams} from './buildLocale';
import {loadSiteConfig} from '../../server/config';
import {isAutomaticBaseUrlLocalizationDisabled} from './buildUtils';

export type BuildCLIOptions = Pick<LoadContextParams, 'config' | 'outDir'> & {
  locale?: [string, ...string[]];

@ -81,21 +81,27 @@ async function getLocalesToBuild({
  siteDir: string;
  cliOptions: BuildCLIOptions;
}): Promise<[string, ...string[]]> {
  const {siteConfig} = await loadSiteConfig({
  // TODO we shouldn't need to load all context + i18n just to get that list
  // only loading siteConfig should be enough
  const context = await loadContext({
    siteDir,
    customConfigFilePath: cliOptions.config,
    outDir: cliOptions.outDir,
    config: cliOptions.config,
    automaticBaseUrlLocalizationDisabled:
      isAutomaticBaseUrlLocalizationDisabled(cliOptions),
  });

  const locales =
    cliOptions.locale ??
    getLocaleList({
      i18nConfig: siteConfig.i18n,
      currentLocale: siteConfig.i18n.defaultLocale, // Awkward but ok
    });
  const i18n = await loadI18n({
    siteDir,
    config: context.siteConfig,
    currentLocale: context.siteConfig.i18n.defaultLocale, // Awkward but ok
    automaticBaseUrlLocalizationDisabled: false,
  });

  const locales = cliOptions.locale ?? i18n.locales;

  return orderLocales({
    locales: locales as [string, ...string[]],
    defaultLocale: siteConfig.i18n.defaultLocale,
    defaultLocale: i18n.defaultLocale,
  });
}