feat(core): New siteConfig `future.experimental_vcs` API + `future.experimental_faster.gitEagerVcs` flag (#11512)
Some checks failed
Argos CI / take-screenshots (push) Has been cancelled
Build Hash Router / Build Hash Router (push) Has been cancelled
Canary Release / Publish Canary (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
Continuous Releases / Continuous Releases (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (20.0) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (22) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (24) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 (25.1) (push) Has been cancelled
E2E Tests / E2E — Yarn v1 Windows (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (node-modules, -st) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -s) (push) Has been cancelled
E2E Tests / E2E — Yarn Berry (pnp, -st) (push) Has been cancelled
E2E Tests / E2E — npm (push) Has been cancelled
E2E Tests / E2E — pnpm (push) Has been cancelled

Co-authored-by: slorber <749374+slorber@users.noreply.github.com>
This commit is contained in:
Sébastien Lorber 2025-11-14 18:15:45 +01:00 committed by GitHub
parent a24b8ad5ed
commit acc66c14b0
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
58 changed files with 2386 additions and 720 deletions

View File

@ -32,3 +32,5 @@ jobs:
run: yarn || yarn || yarn
- name: Build blog-only
run: yarn workspace website build:blogOnly
env:
DOCUSAURUS_PERF_LOGGER: 'true'

View File

@ -37,6 +37,7 @@ jobs:
- name: Build Hash Router
run: yarn build:website:fast
env:
DOCUSAURUS_PERF_LOGGER: 'true'
DOCUSAURUS_ROUTER: 'hash'
# Note: hash router + baseUrl do not play well together
# This would host at https://facebook.github.io/docusaurus/#/docusaurus/

View File

@ -62,6 +62,7 @@ jobs:
comment-key: DOCUSAURUS_INFRA_${{ matrix.DOCUSAURUS_INFRA }}
env:
DOCUSAURUS_SLOWER: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 'true' || 'false' }}
DOCUSAURUS_PERF_LOGGER: 'true'
# Ensures build times stay under reasonable thresholds
build-time:
@ -88,6 +89,7 @@ jobs:
timeout-minutes: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 3 || 2 }}
env:
DOCUSAURUS_SLOWER: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 'true' || 'false' }}
DOCUSAURUS_PERF_LOGGER: 'true'
# Ensure build with a warm cache does not increase too much
- name: Build (warm cache)
@ -96,5 +98,6 @@ jobs:
timeout-minutes: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 1 || 2 }}
env:
DOCUSAURUS_SLOWER: ${{ matrix.DOCUSAURUS_INFRA == 'SLOWER' && 'true' || 'false' }}
DOCUSAURUS_PERF_LOGGER: 'true'
# TODO post a GitHub comment with build with perf warnings?

View File

@ -54,6 +54,8 @@ jobs:
run: yarn workspace website test:swizzle:wrap:ts
- name: Docusaurus Build
run: yarn build:website:fast
env:
DOCUSAURUS_PERF_LOGGER: 'true'
- name: TypeCheck website
# see https://github.com/facebook/docusaurus/pull/10486

5
jest/deps.d.ts vendored
View File

@ -12,8 +12,3 @@ declare module 'to-vfile' {
export function read(path: string, encoding?: string): Promise<VFile>;
}
declare module '@testing-utils/git' {
const createTempRepo: typeof import('./utils/git').createTempRepo;
export {createTempRepo};
}

View File

@ -82,7 +82,7 @@ function normalizePaths<T>(value: T): T {
(val) => val.split(cwdReal).join('<PROJECT_ROOT>'),
(val) => val.split(cwd).join('<PROJECT_ROOT>'),
// Replace home directory with <TEMP_DIR>
// Replace temp directory with <TEMP_DIR>
(val) => val.split(tempDirReal).join('<TEMP_DIR>'),
(val) => val.split(tempDir).join('<TEMP_DIR>'),

63
jest/utils/git.ts vendored
View File

@ -1,63 +0,0 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import fs from 'fs-extra';
import os from 'os';
import path from 'path';
import shell from 'shelljs';
class Git {
constructor(private dir: string) {
const res = shell.exec('git init', {cwd: dir, silent: true});
if (res.code !== 0) {
throw new Error(`git init exited with code ${res.code}.
stderr: ${res.stderr}
stdout: ${res.stdout}`);
}
// Doesn't matter currently
shell.exec('git config user.email "test@jc-verse.com"', {
cwd: dir,
silent: true,
});
shell.exec('git config user.name "Test"', {cwd: dir, silent: true});
shell.exec('git commit --allow-empty -m "First commit"', {
cwd: dir,
silent: true,
});
}
commit(msg: string, date: string, author: string): void {
const addRes = shell.exec('git add .', {cwd: this.dir, silent: true});
const commitRes = shell.exec(
`git commit -m "${msg}" --date "${date}T00:00:00Z" --author "${author}"`,
{
cwd: this.dir,
env: {GIT_COMMITTER_DATE: `${date}T00:00:00Z`},
silent: true,
},
);
if (addRes.code !== 0) {
throw new Error(`git add exited with code ${addRes.code}.
stderr: ${addRes.stderr}
stdout: ${addRes.stdout}`);
}
if (commitRes.code !== 0) {
throw new Error(`git commit exited with code ${commitRes.code}.
stderr: ${commitRes.stderr}
stdout: ${commitRes.stdout}`);
}
}
}
// This function is sync so the same mock repo can be shared across tests
export function createTempRepo(): {repoDir: string; git: Git} {
const repoDir = fs.mkdtempSync(path.join(os.tmpdir(), 'git-test-repo'));
const git = new Git(repoDir);
return {repoDir, git};
}

View File

@ -25,7 +25,7 @@
"@docusaurus/logger": "3.9.2",
"@docusaurus/utils": "3.9.2",
"commander": "^5.1.0",
"execa": "5.1.1",
"execa": "^5.1.1",
"fs-extra": "^11.1.1",
"lodash": "^4.17.21",
"prompts": "^2.4.2",

View File

@ -8,7 +8,10 @@
import {jest} from '@jest/globals';
import path from 'path';
import fs from 'fs-extra';
import {DEFAULT_PARSE_FRONT_MATTER} from '@docusaurus/utils';
import {
DEFAULT_PARSE_FRONT_MATTER,
DEFAULT_VCS_CONFIG,
} from '@docusaurus/utils';
import {fromPartial} from '@total-typescript/shoehorn';
import {normalizePluginOptions} from '@docusaurus/utils-validation';
import tree from 'tree-node-cli';
@ -51,7 +54,7 @@ function getBlogContentPaths(siteDir: string): BlogContentPaths {
}
async function testGenerateFeeds(
context: LoadContext,
contextInput: LoadContext,
optionsInput: Options,
): Promise<void> {
const options = validateOptions({
@ -62,6 +65,17 @@ async function testGenerateFeeds(
options: optionsInput,
});
const context: LoadContext = {
...contextInput,
siteConfig: {
...contextInput.siteConfig,
future: {
...contextInput.siteConfig?.future,
experimental_vcs: DEFAULT_VCS_CONFIG,
},
},
};
const contentPaths = getBlogContentPaths(context.siteDir);
const authorsMap = await getAuthorsMap({
contentPaths,

View File

@ -8,12 +8,7 @@
import {jest} from '@jest/globals';
import * as path from 'path';
import {normalizePluginOptions} from '@docusaurus/utils-validation';
import {
posixPath,
getFileCommitDate,
LAST_UPDATE_FALLBACK,
getLocaleConfig,
} from '@docusaurus/utils';
import {posixPath, getLocaleConfig, TEST_VCS} from '@docusaurus/utils';
import {DEFAULT_FUTURE_CONFIG} from '@docusaurus/core/src/server/configValidation';
import pluginContentBlog from '../index';
import {validateOptions} from '../options';
@ -32,6 +27,10 @@ import type {
EditUrlFunction,
} from '@docusaurus/plugin-content-blog';
async function getFileCreationDate(filePath: string): Promise<Date> {
return new Date((await TEST_VCS.getFileCreationInfo(filePath)).timestamp);
}
const markdown: MarkdownConfig = {
format: 'mdx',
mermaid: true,
@ -561,9 +560,7 @@ describe('blog plugin', () => {
const blogPosts = await getBlogPosts(siteDir);
const noDateSource = path.posix.join('@site', PluginPath, 'no date.md');
const noDateSourceFile = path.posix.join(siteDir, PluginPath, 'no date.md');
// We know the file exists and we know we have git
const result = await getFileCommitDate(noDateSourceFile, {age: 'oldest'});
const noDateSourceTime = result.date;
const noDateSourceTime = await getFileCreationDate(noDateSourceFile);
expect({
...getByTitle(blogPosts, 'no date').metadata,
@ -674,29 +671,23 @@ describe('last update', () => {
);
const {blogPosts} = (await plugin.loadContent!())!;
const TestLastUpdate = await TEST_VCS.getFileLastUpdateInfo('any path');
expect(blogPosts[0]?.metadata.lastUpdatedBy).toBe('seb');
expect(blogPosts[0]?.metadata.lastUpdatedAt).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[1]?.metadata.lastUpdatedBy).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedBy,
);
expect(blogPosts[1]?.metadata.lastUpdatedBy).toBe(TestLastUpdate.author);
expect(blogPosts[1]?.metadata.lastUpdatedAt).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[2]?.metadata.lastUpdatedBy).toBe('seb');
expect(blogPosts[2]?.metadata.lastUpdatedAt).toBe(
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[2]?.metadata.lastUpdatedAt).toBe(TestLastUpdate.timestamp);
expect(blogPosts[3]?.metadata.lastUpdatedBy).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedBy,
);
expect(blogPosts[3]?.metadata.lastUpdatedAt).toBe(
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[3]?.metadata.lastUpdatedBy).toBe(TestLastUpdate.author);
expect(blogPosts[3]?.metadata.lastUpdatedAt).toBe(TestLastUpdate.timestamp);
});
it('time only', async () => {
@ -710,29 +701,27 @@ describe('last update', () => {
);
const {blogPosts} = (await plugin.loadContent!())!;
expect(blogPosts[0]?.metadata.title).toBe('Author');
const TestLastUpdate = await TEST_VCS.getFileLastUpdateInfo('any path');
expect(blogPosts[0]?.metadata.title).toBe('Both');
expect(blogPosts[0]?.metadata.lastUpdatedBy).toBeUndefined();
expect(blogPosts[0]?.metadata.lastUpdatedAt).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[1]?.metadata.title).toBe('Nothing');
expect(blogPosts[1]?.metadata.title).toBe('Last update date');
expect(blogPosts[1]?.metadata.lastUpdatedBy).toBeUndefined();
expect(blogPosts[1]?.metadata.lastUpdatedAt).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[2]?.metadata.title).toBe('Both');
expect(blogPosts[2]?.metadata.title).toBe('Author');
expect(blogPosts[2]?.metadata.lastUpdatedBy).toBeUndefined();
expect(blogPosts[2]?.metadata.lastUpdatedAt).toBe(
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[2]?.metadata.lastUpdatedAt).toBe(TestLastUpdate.timestamp);
expect(blogPosts[3]?.metadata.title).toBe('Last update date');
expect(blogPosts[3]?.metadata.title).toBe('Nothing');
expect(blogPosts[3]?.metadata.lastUpdatedBy).toBeUndefined();
expect(blogPosts[3]?.metadata.lastUpdatedAt).toBe(
lastUpdateFor('2021-01-01'),
);
expect(blogPosts[3]?.metadata.lastUpdatedAt).toBe(TestLastUpdate.timestamp);
});
it('author only', async () => {
@ -746,20 +735,18 @@ describe('last update', () => {
);
const {blogPosts} = (await plugin.loadContent!())!;
const TestLastUpdate = await TEST_VCS.getFileLastUpdateInfo('any path');
expect(blogPosts[0]?.metadata.lastUpdatedBy).toBe('seb');
expect(blogPosts[0]?.metadata.lastUpdatedAt).toBeUndefined();
expect(blogPosts[1]?.metadata.lastUpdatedBy).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedBy,
);
expect(blogPosts[1]?.metadata.lastUpdatedBy).toBe(TestLastUpdate.author);
expect(blogPosts[1]?.metadata.lastUpdatedAt).toBeUndefined();
expect(blogPosts[2]?.metadata.lastUpdatedBy).toBe('seb');
expect(blogPosts[2]?.metadata.lastUpdatedAt).toBeUndefined();
expect(blogPosts[3]?.metadata.lastUpdatedBy).toBe(
LAST_UPDATE_FALLBACK.lastUpdatedBy,
);
expect(blogPosts[3]?.metadata.lastUpdatedBy).toBe(TestLastUpdate.author);
expect(blogPosts[3]?.metadata.lastUpdatedAt).toBeUndefined();
});

View File

@ -19,7 +19,6 @@ import {
Globby,
groupTaggedItems,
getTagVisibility,
getFileCommitDate,
getContentPathList,
isUnlisted,
isDraft,
@ -225,6 +224,7 @@ async function processBlogSourceFile(
siteConfig: {
baseUrl,
markdown: {parseFrontMatter},
future: {experimental_vcs: vcs},
},
siteDir,
i18n,
@ -257,6 +257,7 @@ async function processBlogSourceFile(
blogSourceAbsolute,
options,
frontMatter.last_update,
vcs,
);
const draft = isDraft({frontMatter});
@ -285,17 +286,11 @@ async function processBlogSourceFile(
return parsedBlogFileName.date;
}
try {
const result = await getFileCommitDate(blogSourceAbsolute, {
age: 'oldest',
includeAuthor: false,
});
return result.date;
} catch (err) {
logger.warn(err);
const result = await vcs.getFileCreationInfo(blogSourceAbsolute);
if (result == null) {
return (await fs.stat(blogSourceAbsolute)).birthtime;
}
return new Date(result.timestamp);
}
const date = await getDate();

View File

@ -12,8 +12,8 @@ import {
createSlugger,
posixPath,
DEFAULT_PLUGIN_ID,
LAST_UPDATE_FALLBACK,
getLocaleConfig,
TEST_VCS,
} from '@docusaurus/utils';
import {getTagsFile} from '@docusaurus/utils-validation';
import {createSidebarsUtils} from '../sidebars/utils';
@ -529,8 +529,8 @@ describe('simple site', () => {
custom_edit_url: 'https://github.com/customUrl/docs/lorem.md',
unrelated_front_matter: "won't be part of metadata",
},
lastUpdatedAt: LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdatedBy: LAST_UPDATE_FALLBACK.lastUpdatedBy,
lastUpdatedAt: TEST_VCS.LAST_UPDATE_INFO.timestamp,
lastUpdatedBy: TEST_VCS.LAST_UPDATE_INFO.author,
tags: [],
unlisted: false,
});
@ -664,7 +664,7 @@ describe('simple site', () => {
},
title: 'Last Update Author Only',
},
lastUpdatedAt: LAST_UPDATE_FALLBACK.lastUpdatedAt,
lastUpdatedAt: TEST_VCS.LAST_UPDATE_INFO.timestamp,
lastUpdatedBy: 'Custom Author (processed by parseFrontMatter)',
sidebarPosition: undefined,
tags: [],

View File

@ -97,6 +97,7 @@ async function doProcessDocMetadata({
siteDir,
siteConfig: {
markdown: {parseFrontMatter},
future: {experimental_vcs: vcs},
},
} = context;
@ -125,6 +126,7 @@ async function doProcessDocMetadata({
filePath,
options,
lastUpdateFrontMatter,
vcs,
);
// E.g. api/plugins/myDoc -> myDoc; myDoc -> myDoc

View File

@ -8,6 +8,7 @@
import * as path from 'path';
import {fromPartial} from '@total-typescript/shoehorn';
import {DEFAULT_PARSE_FRONT_MATTER} from '@docusaurus/utils/src';
import {DEFAULT_VCS_CONFIG} from '@docusaurus/utils';
import {readVersionsMetadata} from '../version';
import {DEFAULT_OPTIONS} from '../../options';
import {loadVersion} from '../loadVersion';
@ -37,6 +38,9 @@ async function siteFixture(fixture: string) {
markdown: {
parseFrontMatter: DEFAULT_PARSE_FRONT_MATTER,
},
future: {
experimental_vcs: DEFAULT_VCS_CONFIG,
},
},
});

View File

@ -98,6 +98,7 @@ async function processPageSourceFile(
): Promise<Metadata | undefined> {
const {context, options, contentPaths} = params;
const {siteConfig, baseUrl, siteDir, i18n} = context;
const vcs = siteConfig.future.experimental_vcs;
const {editUrl} = options;
// Lookup in localized folder in priority
@ -180,6 +181,7 @@ async function processPageSourceFile(
source,
options,
frontMatter.last_update,
vcs,
);
if (isDraft({frontMatter})) {

View File

@ -6,12 +6,14 @@
*/
import {fromPartial} from '@total-typescript/shoehorn';
import {DEFAULT_VCS_CONFIG} from '@docusaurus/utils';
import createSitemap from '../createSitemap';
import type {PluginOptions} from '../options';
import type {DocusaurusConfig, RouteConfig} from '@docusaurus/types';
const siteConfig: DocusaurusConfig = fromPartial({
url: 'https://example.com',
future: {experimental_vcs: DEFAULT_VCS_CONFIG},
});
const options: PluginOptions = {

View File

@ -6,6 +6,7 @@
*/
import {fromPartial} from '@total-typescript/shoehorn';
import {TEST_VCS} from '@docusaurus/utils';
import {createSitemapItem} from '../createSitemapItem';
import {DEFAULT_OPTIONS} from '../options';
import type {PluginOptions} from '../options';
@ -13,6 +14,7 @@ import type {DocusaurusConfig, RouteConfig} from '@docusaurus/types';
const siteConfig: DocusaurusConfig = fromPartial({
url: 'https://example.com',
future: {experimental_vcs: TEST_VCS},
});
function test(params: {

View File

@ -6,16 +6,17 @@
*/
import {applyTrailingSlash} from '@docusaurus/utils-common';
import {getLastUpdate, normalizeUrl} from '@docusaurus/utils';
import {normalizeUrl} from '@docusaurus/utils';
import type {LastModOption, SitemapItem} from './types';
import type {DocusaurusConfig, RouteConfig} from '@docusaurus/types';
import type {DocusaurusConfig, RouteConfig, VcsConfig} from '@docusaurus/types';
import type {PluginOptions} from './options';
async function getRouteLastUpdatedAt(
route: RouteConfig,
vcs: Pick<VcsConfig, 'getFileLastUpdateInfo'>,
): Promise<number | null | undefined> {
// Important to bail-out early here
// This can lead to duplicated getLastUpdate() calls and performance problems
// This can lead to duplicated VCS calls and performance problems
// See https://github.com/facebook/docusaurus/pull/11211
if (route.metadata?.lastUpdatedAt === null) {
return null;
@ -24,8 +25,10 @@ async function getRouteLastUpdatedAt(
return route.metadata?.lastUpdatedAt;
}
if (route.metadata?.sourceFilePath) {
const lastUpdate = await getLastUpdate(route.metadata?.sourceFilePath);
return lastUpdate?.lastUpdatedAt ?? null;
const lastUpdateInfo = await vcs.getFileLastUpdateInfo(
route.metadata?.sourceFilePath,
);
return lastUpdateInfo?.timestamp ?? null;
}
return undefined;
@ -46,14 +49,16 @@ function formatLastmod(timestamp: number, lastmodOption: LastModOption) {
async function getRouteLastmod({
route,
lastmod,
vcs,
}: {
route: RouteConfig;
lastmod: LastModOption | null;
vcs: Pick<VcsConfig, 'getFileLastUpdateInfo'>;
}): Promise<string | null> {
if (lastmod === null) {
return null;
}
const lastUpdatedAt = (await getRouteLastUpdatedAt(route)) ?? null;
const lastUpdatedAt = (await getRouteLastUpdatedAt(route, vcs)) ?? null;
return lastUpdatedAt ? formatLastmod(lastUpdatedAt, lastmod) : null;
}
@ -77,6 +82,10 @@ export async function createSitemapItem({
]),
changefreq,
priority,
lastmod: await getRouteLastmod({route, lastmod}),
lastmod: await getRouteLastmod({
route,
lastmod,
vcs: siteConfig.future.experimental_vcs,
}),
};
}

View File

@ -33,6 +33,7 @@ export type FasterConfig = {
rspackBundler: boolean;
rspackPersistentCache: boolean;
ssgWorkerThreads: boolean;
gitEagerVcs: boolean;
};
export type FutureV4Config = {
@ -40,6 +41,53 @@ export type FutureV4Config = {
useCssCascadeLayers: boolean;
};
// VCS (Version Control System) info about a given change, e.g., a git commit.
// The agnostic term "VCS" is used instead of "git" to acknowledge the existence
// of other version control systems, and external systems like CMSs and i18n
// translation SaaS (e.g., Crowdin)
export type VcsChangeInfo = {timestamp: number; author: string};
export type VscInitializeParams = {
siteDir: string;
// TODO could it be useful to provide all plugins' getPathsToWatch() here?
// This could give us the opportunity to find out all VCS roots ahead of time.
// This is mostly useful for multi-git-repo setups; it can be added later.
};
// VCS (Version Control System) config hooks to get file change info.
// This lets you override and customize the default Docusaurus behavior.
// This can be useful to optimize calls or when using something other than Git
// See https://github.com/facebook/docusaurus/issues/11208
// See https://github.com/e18e/ecosystem-issues/issues/216
export type VcsConfig = {
/**
* Initialize the VCS system.
 * This is notably useful to eagerly pre-read a full Git repository so that
 * each file's first/last update info can be retrieved efficiently later
*
* Note: for now, this function is synchronous on purpose, it can be used to
* start warming up the VCS by reading eagerly, but we don't want to delay
* the rest of the Docusaurus start/build process. Instead of awaiting the
* init promise, you can create/store it and await it later during reads.
*
* @param params Initialization params that can be useful to warm up the VCS
*/
initialize: (params: VscInitializeParams) => void;
getFileCreationInfo: (filePath: string) => Promise<VcsChangeInfo | null>;
getFileLastUpdateInfo: (filePath: string) => Promise<VcsChangeInfo | null>;
};
/**
* List of pre-built VcsConfig that Docusaurus provides.
*/
export type VcsPreset =
| 'git-ad-hoc'
| 'git-eager'
| 'hardcoded'
| 'disabled'
| 'default-v1'
| 'default-v2';
export type FutureConfig = {
/**
* Turns v4 future flags on
@ -50,6 +98,8 @@ export type FutureConfig = {
experimental_storage: StorageConfig;
experimental_vcs: VcsConfig;
/**
* Docusaurus can work with 2 router types.
*
@ -367,6 +417,7 @@ export type Config = Overwrite<
{
v4?: boolean | Partial<FutureV4Config>;
experimental_faster?: boolean | Partial<FasterConfig>;
experimental_vcs?: VcsPreset | VcsConfig | boolean;
}
>;
}

View File

@ -13,6 +13,10 @@ export {
FutureV4Config,
FasterConfig,
StorageConfig,
VcsConfig,
VcsPreset,
VcsChangeInfo,
VscInitializeParams,
Config,
} from './config';

View File

@ -22,7 +22,7 @@
"@docusaurus/types": "3.9.2",
"@docusaurus/utils-common": "3.9.2",
"escape-string-regexp": "^4.0.0",
"execa": "5.1.1",
"execa": "^5.1.1",
"file-loader": "^6.2.0",
"fs-extra": "^11.1.1",
"github-slugger": "^1.5.0",

View File

@ -1 +0,0 @@
# Hoo hoo, if this path tricks you...

View File

@ -1,7 +0,0 @@
---
id: hello
title: Hello, World !
slug: /
---
Hello

View File

@ -1,159 +0,0 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import fs from 'fs-extra';
import path from 'path';
import {createTempRepo} from '@testing-utils/git';
import {FileNotTrackedError, getFileCommitDate} from '../gitUtils';
import {getGitLastUpdate} from '../lastUpdateUtils';
/* eslint-disable no-restricted-properties */
function initializeTempRepo() {
const {repoDir, git} = createTempRepo();
fs.writeFileSync(path.join(repoDir, 'test.txt'), 'Some content');
git.commit(
'Create test.txt',
'2020-06-19',
'Caroline <caroline@jc-verse.com>',
);
fs.writeFileSync(path.join(repoDir, 'test.txt'), 'Updated content');
git.commit(
'Update test.txt',
'2020-06-20',
'Josh-Cena <josh-cena@jc-verse.com>',
);
fs.writeFileSync(path.join(repoDir, 'test.txt'), 'Updated content (2)');
fs.writeFileSync(path.join(repoDir, 'moved.txt'), 'This file is moved');
git.commit(
'Update test.txt again, create moved.txt',
'2020-09-13',
'Caroline <caroline@jc-verse.com>',
);
fs.moveSync(path.join(repoDir, 'moved.txt'), path.join(repoDir, 'dest.txt'));
git.commit(
'Rename moved.txt to dest.txt',
'2020-11-13',
'Josh-Cena <josh-cena@jc-verse.com>',
);
fs.writeFileSync(path.join(repoDir, 'untracked.txt'), "I'm untracked");
return repoDir;
}
describe('getFileCommitDate', () => {
const repoDir = initializeTempRepo();
it('returns earliest commit date', async () => {
await expect(
getFileCommitDate(path.join(repoDir, 'test.txt'), {}),
).resolves.toEqual({
date: new Date('2020-06-19'),
timestamp: new Date('2020-06-19').getTime(),
});
await expect(
getFileCommitDate(path.join(repoDir, 'dest.txt'), {}),
).resolves.toEqual({
date: new Date('2020-09-13'),
timestamp: new Date('2020-09-13').getTime(),
});
});
it('returns latest commit date', async () => {
await expect(
getFileCommitDate(path.join(repoDir, 'test.txt'), {age: 'newest'}),
).resolves.toEqual({
date: new Date('2020-09-13'),
timestamp: new Date('2020-09-13').getTime(),
});
await expect(
getFileCommitDate(path.join(repoDir, 'dest.txt'), {age: 'newest'}),
).resolves.toEqual({
date: new Date('2020-11-13'),
timestamp: new Date('2020-11-13').getTime(),
});
});
it('returns latest commit date with author', async () => {
await expect(
getFileCommitDate(path.join(repoDir, 'test.txt'), {
age: 'oldest',
includeAuthor: true,
}),
).resolves.toEqual({
date: new Date('2020-06-19'),
timestamp: new Date('2020-06-19').getTime(),
author: 'Caroline',
});
await expect(
getFileCommitDate(path.join(repoDir, 'dest.txt'), {
age: 'oldest',
includeAuthor: true,
}),
).resolves.toEqual({
date: new Date('2020-09-13'),
timestamp: new Date('2020-09-13').getTime(),
author: 'Caroline',
});
});
it('returns earliest commit date with author', async () => {
await expect(
getFileCommitDate(path.join(repoDir, 'test.txt'), {
age: 'newest',
includeAuthor: true,
}),
).resolves.toEqual({
date: new Date('2020-09-13'),
timestamp: new Date('2020-09-13').getTime(),
author: 'Caroline',
});
await expect(
getFileCommitDate(path.join(repoDir, 'dest.txt'), {
age: 'newest',
includeAuthor: true,
}),
).resolves.toEqual({
date: new Date('2020-11-13'),
timestamp: new Date('2020-11-13').getTime(),
author: 'Josh-Cena',
});
});
it('throws custom error when file is not tracked', async () => {
await expect(() =>
getFileCommitDate(path.join(repoDir, 'untracked.txt'), {
age: 'newest',
includeAuthor: true,
}),
).rejects.toThrow(FileNotTrackedError);
});
it('throws when file not found', async () => {
await expect(() =>
getFileCommitDate(path.join(repoDir, 'nonexistent.txt'), {
age: 'newest',
includeAuthor: true,
}),
).rejects.toThrow(
/Failed to retrieve git history for ".*nonexistent.txt" because the file does not exist./,
);
});
it('multiple files not tracked by git', async () => {
const consoleMock = jest
.spyOn(console, 'warn')
.mockImplementation(() => {});
const tempFilePath1 = path.join(repoDir, 'file1.md');
const tempFilePath2 = path.join(repoDir, 'file2.md');
await fs.writeFile(tempFilePath1, 'Lorem ipsum :)');
await fs.writeFile(tempFilePath2, 'Lorem ipsum :)');
// TODO this is not the correct place to test "getGitLastUpdate"
await expect(getGitLastUpdate(tempFilePath1)).resolves.toBeNull();
await expect(getGitLastUpdate(tempFilePath2)).resolves.toBeNull();
expect(consoleMock).toHaveBeenCalledTimes(1);
expect(consoleMock).toHaveBeenLastCalledWith(
expect.stringMatching(/not tracked by git./),
);
await fs.unlink(tempFilePath1);
await fs.unlink(tempFilePath2);
});
});

View File

@ -5,162 +5,85 @@
* LICENSE file in the root directory of this source tree.
*/
import {jest} from '@jest/globals';
import fs from 'fs-extra';
import path from 'path';
import {createTempRepo} from '@testing-utils/git';
import execa from 'execa';
import {readLastUpdateData} from '../lastUpdateUtils';
import {TEST_VCS} from '../vcs/vcs';
import {
getGitLastUpdate,
LAST_UPDATE_FALLBACK,
LAST_UPDATE_UNTRACKED_GIT_FILEPATH,
readLastUpdateData,
} from '../lastUpdateUtils';
import type {FrontMatterLastUpdate} from '../lastUpdateUtils';
describe('getGitLastUpdate', () => {
const {repoDir} = createTempRepo();
const existingFilePath = path.join(
__dirname,
'__fixtures__/simple-site/hello.md',
);
it('existing test file in repository with Git timestamp', async () => {
const lastUpdateData = await getGitLastUpdate(existingFilePath);
expect(lastUpdateData).not.toBeNull();
const {lastUpdatedAt, lastUpdatedBy} = lastUpdateData!;
expect(lastUpdatedBy).not.toBeNull();
expect(typeof lastUpdatedBy).toBe('string');
expect(lastUpdatedAt).not.toBeNull();
expect(typeof lastUpdatedAt).toBe('number');
});
it('existing test file with spaces in path', async () => {
const filePathWithSpace = path.join(
__dirname,
'__fixtures__/simple-site/doc with space.md',
);
const lastUpdateData = await getGitLastUpdate(filePathWithSpace);
expect(lastUpdateData).not.toBeNull();
const {lastUpdatedBy, lastUpdatedAt} = lastUpdateData!;
expect(lastUpdatedBy).not.toBeNull();
expect(typeof lastUpdatedBy).toBe('string');
expect(lastUpdatedAt).not.toBeNull();
expect(typeof lastUpdatedAt).toBe('number');
});
it('non-existing file', async () => {
const consoleMock = jest
.spyOn(console, 'warn')
.mockImplementation(() => {});
const nonExistingFileName = '.nonExisting';
const nonExistingFilePath = path.join(
__dirname,
'__fixtures__',
nonExistingFileName,
);
await expect(getGitLastUpdate(nonExistingFilePath)).rejects.toThrow(
/An error occurred when trying to get the last update date/,
);
expect(consoleMock).toHaveBeenCalledTimes(0);
consoleMock.mockRestore();
});
it('git does not exist', async () => {
const mock = jest.spyOn(execa, 'sync').mockImplementationOnce(() => {
throw new Error('Git does not exist');
});
const consoleMock = jest
.spyOn(console, 'warn')
.mockImplementation(() => {});
const lastUpdateData = await getGitLastUpdate(existingFilePath);
expect(lastUpdateData).toBeNull();
expect(consoleMock).toHaveBeenLastCalledWith(
expect.stringMatching(
/.*\[WARNING\].* Sorry, the last update options require Git\..*/,
),
);
consoleMock.mockRestore();
mock.mockRestore();
});
it('temporary created file that is not tracked by git', async () => {
const consoleMock = jest
.spyOn(console, 'warn')
.mockImplementation(() => {});
const tempFilePath = path.join(repoDir, 'file.md');
await fs.writeFile(tempFilePath, 'Lorem ipsum :)');
await expect(getGitLastUpdate(tempFilePath)).resolves.toBeNull();
expect(consoleMock).toHaveBeenCalledTimes(1);
expect(consoleMock).toHaveBeenLastCalledWith(
expect.stringMatching(/not tracked by git./),
);
await fs.unlink(tempFilePath);
});
});
describe('readLastUpdateData', () => {
const testDate = '2021-01-01';
const testTimestamp = new Date(testDate).getTime();
const testAuthor = 'ozaki';
async function readData(
filePath: string,
options: Parameters<typeof readLastUpdateData>[1],
lastUpdateFrontMatter: Parameters<typeof readLastUpdateData>[2],
) {
return readLastUpdateData(
filePath,
options,
lastUpdateFrontMatter,
TEST_VCS,
);
}
describe('on untracked Git file', () => {
function test(lastUpdateFrontMatter: FrontMatterLastUpdate | undefined) {
return readLastUpdateData(
LAST_UPDATE_UNTRACKED_GIT_FILEPATH,
function readUntrackedFile(
lastUpdateFrontMatter: FrontMatterLastUpdate | undefined,
) {
return readData(
TEST_VCS.UNTRACKED_FILE_PATH,
{showLastUpdateAuthor: true, showLastUpdateTime: true},
lastUpdateFrontMatter,
);
}
it('reads null at/by from Git', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await test({});
const {lastUpdatedAt, lastUpdatedBy} = await readUntrackedFile({});
expect(lastUpdatedAt).toBeNull();
expect(lastUpdatedBy).toBeNull();
});
it('reads null at from Git and author from front matter', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await test({author: testAuthor});
const {lastUpdatedAt, lastUpdatedBy} = await readUntrackedFile({
author: testAuthor,
});
expect(lastUpdatedAt).toBeNull();
expect(lastUpdatedBy).toEqual(testAuthor);
});
it('reads null by from Git and date from front matter', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await test({date: testDate});
const {lastUpdatedAt, lastUpdatedBy} = await readUntrackedFile({
date: testDate,
});
expect(lastUpdatedBy).toBeNull();
expect(lastUpdatedAt).toEqual(testTimestamp);
});
});
it('read last time show author time', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: true},
{date: testDate},
);
expect(lastUpdatedAt).toEqual(testTimestamp);
expect(lastUpdatedBy).toBe(LAST_UPDATE_FALLBACK.lastUpdatedBy);
expect(lastUpdatedBy).toBe(TEST_VCS.LAST_UPDATE_INFO.author);
});
it('read last author show author time', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: true},
{author: testAuthor},
);
expect(lastUpdatedBy).toEqual(testAuthor);
expect(lastUpdatedAt).toBe(LAST_UPDATE_FALLBACK.lastUpdatedAt);
expect(lastUpdatedAt).toBe(TEST_VCS.LAST_UPDATE_INFO.timestamp);
});
it('read last all show author time', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: true},
{author: testAuthor, date: testDate},
@ -170,7 +93,7 @@ describe('readLastUpdateData', () => {
});
it('read last default show none', async () => {
const lastUpdate = await readLastUpdateData(
const lastUpdate = await readData(
'',
{showLastUpdateAuthor: false, showLastUpdateTime: false},
{},
@ -179,7 +102,7 @@ describe('readLastUpdateData', () => {
});
it('read last author show none', async () => {
const lastUpdate = await readLastUpdateData(
const lastUpdate = await readData(
'',
{showLastUpdateAuthor: false, showLastUpdateTime: false},
{author: testAuthor},
@ -188,17 +111,17 @@ describe('readLastUpdateData', () => {
});
it('read last time show author', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: false},
{date: testDate},
);
expect(lastUpdatedBy).toBe(LAST_UPDATE_FALLBACK.lastUpdatedBy);
expect(lastUpdatedBy).toBe(TEST_VCS.LAST_UPDATE_INFO.author);
expect(lastUpdatedAt).toBeUndefined();
});
it('read last author show author', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: false},
{author: testAuthor},
@ -208,17 +131,17 @@ describe('readLastUpdateData', () => {
});
it('read last default show author default', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: false},
{},
);
expect(lastUpdatedBy).toBe(LAST_UPDATE_FALLBACK.lastUpdatedBy);
expect(lastUpdatedBy).toBe(TEST_VCS.LAST_UPDATE_INFO.author);
expect(lastUpdatedAt).toBeUndefined();
});
it('read last time show time', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: false, showLastUpdateTime: true},
{date: testDate},
@ -228,17 +151,17 @@ describe('readLastUpdateData', () => {
});
it('read last author show time', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: false, showLastUpdateTime: true},
{author: testAuthor},
);
expect(lastUpdatedBy).toBeUndefined();
expect(lastUpdatedAt).toEqual(LAST_UPDATE_FALLBACK.lastUpdatedAt);
expect(lastUpdatedAt).toEqual(TEST_VCS.LAST_UPDATE_INFO.timestamp);
});
it('read last author show time only - both front matter', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: false, showLastUpdateTime: true},
{author: testAuthor, date: testDate},
@ -248,7 +171,7 @@ describe('readLastUpdateData', () => {
});
it('read last author show author only - both front matter', async () => {
const {lastUpdatedAt, lastUpdatedBy} = await readLastUpdateData(
const {lastUpdatedAt, lastUpdatedBy} = await readData(
'',
{showLastUpdateAuthor: true, showLastUpdateTime: false},
{author: testAuthor, date: testDate},

View File

@ -1,200 +0,0 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import path from 'path';
import fs from 'fs-extra';
import os from 'os';
import _ from 'lodash';
import execa from 'execa';
import PQueue from 'p-queue';
// Quite high/conservative concurrency value (it was previously "Infinity")
// See https://github.com/facebook/docusaurus/pull/10915
const DefaultGitCommandConcurrency =
  // TODO Docusaurus v4: bump node, availableParallelism() now always exists
  (typeof os.availableParallelism === 'function'
    ? os.availableParallelism()
    : os.cpus().length) * 4;

// Optional override through the DOCUSAURUS_GIT_COMMAND_CONCURRENCY env var.
// A malformed value parses to NaN and is rejected by the guard below.
const GitCommandConcurrencyEnv = process.env.DOCUSAURUS_GIT_COMMAND_CONCURRENCY
  ? parseInt(process.env.DOCUSAURUS_GIT_COMMAND_CONCURRENCY, 10)
  : undefined;

// NaN (falsy) and non-positive env values fall back to the default
const GitCommandConcurrency =
  GitCommandConcurrencyEnv && GitCommandConcurrencyEnv > 0
    ? GitCommandConcurrencyEnv
    : DefaultGitCommandConcurrency;

// We use a queue to avoid running too many concurrent Git commands at once
// See https://github.com/facebook/docusaurus/issues/10348
const GitCommandQueue = new PQueue({
  concurrency: GitCommandConcurrency,
});

// Returns true when the `git` binary is available on PATH
const realHasGitFn = () => {
  try {
    return execa.sync('git', ['--version']).exitCode === 0;
  } catch (error) {
    return false;
  }
};

// The hasGit call is synchronous IO so we memoize it
// The user won't install Git in the middle of a build anyway...
// (not memoized in tests, so each test observes the real environment)
const hasGit =
  process.env.NODE_ENV === 'test' ? realHasGitFn : _.memoize(realHasGitFn);

/** Custom error thrown when git is not found in `PATH`. */
export class GitNotFoundError extends Error {}

/** Custom error thrown when the current file is not tracked by git. */
export class FileNotTrackedError extends Error {}
/**
 * Fetches the git history of a file and returns a relevant commit date.
 * It gets the commit date instead of author date so that amended commits
 * can have their dates updated.
 *
 * @throws {@link GitNotFoundError} If git is not found in `PATH`.
 * @throws {@link FileNotTrackedError} If the current file is not tracked by git.
 * @throws Also throws when `git log` exited with non-zero, or when it outputs
 * unexpected text.
 */
export async function getFileCommitDate(
  /** Absolute path to the file. */
  file: string,
  args: {
    /**
     * `"oldest"` is the commit that added the file, following renames;
     * `"newest"` is the last commit that edited the file.
     */
    age?: 'oldest' | 'newest';
    /** Use `includeAuthor: true` to get the author information as well. */
    includeAuthor?: false;
  },
): Promise<{
  /** Relevant commit date. */
  date: Date;
  /** Timestamp returned from git, converted to **milliseconds**. */
  timestamp: number;
}>;

/**
 * Fetches the git history of a file and returns a relevant commit date.
 * It gets the commit date instead of author date so that amended commits
 * can have their dates updated.
 *
 * @throws {@link GitNotFoundError} If git is not found in `PATH`.
 * @throws {@link FileNotTrackedError} If the current file is not tracked by git.
 * @throws Also throws when `git log` exited with non-zero, or when it outputs
 * unexpected text.
 */
export async function getFileCommitDate(
  /** Absolute path to the file. */
  file: string,
  args: {
    /**
     * `"oldest"` is the commit that added the file, following renames;
     * `"newest"` is the last commit that edited the file.
     */
    age?: 'oldest' | 'newest';
    includeAuthor: true;
  },
): Promise<{
  /** Relevant commit date. */
  date: Date;
  /** Timestamp returned from git, converted to **milliseconds**. */
  timestamp: number;
  /** The author's name, as returned from git. */
  author: string;
}>;

// Implementation signature: author is only present when includeAuthor=true
export async function getFileCommitDate(
  file: string,
  {
    age = 'oldest',
    includeAuthor = false,
  }: {
    age?: 'oldest' | 'newest';
    includeAuthor?: boolean;
  },
): Promise<{
  date: Date;
  timestamp: number;
  author?: string;
}> {
  if (!hasGit()) {
    throw new GitNotFoundError(
      `Failed to retrieve git history for "${file}" because git is not installed.`,
    );
  }
  if (!(await fs.pathExists(file))) {
    throw new Error(
      `Failed to retrieve git history for "${file}" because the file does not exist.`,
    );
  }
  // We add a "RESULT:" prefix to make parsing easier
  // See why: https://github.com/facebook/docusaurus/pull/10022
  // %ct = committer timestamp (seconds), %an = author name
  const resultFormat = includeAuthor ? 'RESULT:%ct,%an' : 'RESULT:%ct';
  // NOTE: '--follow --diff-filter=A' is a single array entry on purpose:
  // the joined command runs with shell:true, so the shell word-splits it
  // into two separate flags.
  const args = [
    `--format=${resultFormat}`,
    '--max-count=1',
    age === 'oldest' ? '--follow --diff-filter=A' : undefined,
  ]
    .filter(Boolean)
    .join(' ');
  // Run from the file's directory and pass only its basename, so git resolves
  // the path inside whichever repository contains the file.
  const command = `git -c log.showSignature=false log ${args} -- "${path.basename(
    file,
  )}"`;
  // Non-null assertion: the queued task always produces an execa result
  const result = (await GitCommandQueue.add(() => {
    return execa(command, {
      cwd: path.dirname(file),
      shell: true,
    });
  }))!;
  if (result.exitCode !== 0) {
    throw new Error(
      `Failed to retrieve the git history for file "${file}" with exit code ${result.exitCode}: ${result.stderr}`,
    );
  }
  // We only parse the output line starting with our "RESULT:" prefix
  // See why https://github.com/facebook/docusaurus/pull/10022
  const regex = includeAuthor
    ? /(?:^|\n)RESULT:(?<timestamp>\d+),(?<author>.+)(?:$|\n)/
    : /(?:^|\n)RESULT:(?<timestamp>\d+)(?:$|\n)/;
  const output = result.stdout.trim();
  // Empty output means git log found no commit touching this path => untracked
  if (!output) {
    throw new FileNotTrackedError(
      `Failed to retrieve the git history for file "${file}" because the file is not tracked by git.`,
    );
  }
  const match = output.match(regex);
  if (!match) {
    throw new Error(
      `Failed to retrieve the git history for file "${file}" with unexpected output: ${output}`,
    );
  }
  // Git outputs seconds; JS Date APIs want milliseconds
  const timestampInSeconds = Number(match.groups!.timestamp);
  const timestamp = timestampInSeconds * 1_000;
  const date = new Date(timestamp);
  if (includeAuthor) {
    return {date, timestamp, author: match.groups!.author!};
  }
  return {date, timestamp};
}

View File

@ -25,10 +25,12 @@ export {
} from './constants';
export {generate, readOutputHTMLFile} from './emitUtils';
export {
// TODO Docusaurus v4: remove these legacy exports,
// they are only kept for retro-compatibility
getFileCommitDate,
FileNotTrackedError,
GitNotFoundError,
} from './gitUtils';
} from './vcs/gitUtils';
export {
mergeTranslations,
updateTranslationFileMessages,
@ -121,12 +123,11 @@ export {askPreferredLanguage} from './cliUtils';
export {flattenRoutes} from './routeUtils';
export {
getGitLastUpdate,
getLastUpdate,
readLastUpdateData,
LAST_UPDATE_FALLBACK,
type LastUpdateData,
type FrontMatterLastUpdate,
} from './lastUpdateUtils';
export {VcsPresetNames, getVcsPreset, TEST_VCS} from './vcs/vcs';
export {normalizeTags, reportInlineTags} from './tags';

View File

@ -6,13 +6,9 @@
*/
import _ from 'lodash';
import logger from '@docusaurus/logger';
import {
FileNotTrackedError,
GitNotFoundError,
getFileCommitDate,
} from './gitUtils';
import type {PluginOptions} from '@docusaurus/types';
import {getVcsPreset} from './vcs/vcs';
import type {PluginOptions, VcsConfig} from '@docusaurus/types';
export type LastUpdateData = {
/**
@ -29,72 +25,6 @@ export type LastUpdateData = {
lastUpdatedBy: string | undefined | null;
};
let showedGitRequirementError = false;
let showedFileNotTrackedError = false;
export async function getGitLastUpdate(
filePath: string,
): Promise<LastUpdateData | null> {
if (!filePath) {
return null;
}
// Wrap in try/catch in case the shell commands fail
// (e.g. project doesn't use Git, etc).
try {
const result = await getFileCommitDate(filePath, {
age: 'newest',
includeAuthor: true,
});
return {lastUpdatedAt: result.timestamp, lastUpdatedBy: result.author};
} catch (err) {
if (err instanceof GitNotFoundError) {
if (!showedGitRequirementError) {
logger.warn('Sorry, the last update options require Git.');
showedGitRequirementError = true;
}
} else if (err instanceof FileNotTrackedError) {
if (!showedFileNotTrackedError) {
logger.warn(
'Cannot infer the update date for some files, as they are not tracked by git.',
);
showedFileNotTrackedError = true;
}
} else {
throw new Error(
`An error occurred when trying to get the last update date`,
{cause: err},
);
}
return null;
}
}
export const LAST_UPDATE_FALLBACK: LastUpdateData = {
lastUpdatedAt: 1539502055000,
lastUpdatedBy: 'Author',
};
// Not proud of this, but convenient for tests :/
export const LAST_UPDATE_UNTRACKED_GIT_FILEPATH = `file/path/${Math.random()}.mdx`;
export async function getLastUpdate(
filePath: string,
): Promise<LastUpdateData | null> {
if (filePath === LAST_UPDATE_UNTRACKED_GIT_FILEPATH) {
return null;
}
if (
process.env.NODE_ENV !== 'production' ||
process.env.DOCUSAURUS_DISABLE_LAST_UPDATE === 'true'
) {
// Use fake data in dev/test for faster development.
return LAST_UPDATE_FALLBACK;
}
return getGitLastUpdate(filePath);
}
type LastUpdateOptions = Pick<
PluginOptions,
'showLastUpdateAuthor' | 'showLastUpdateTime'
@ -109,11 +39,21 @@ export type FrontMatterLastUpdate = {
date?: Date | string;
};
// TODO Docusaurus v4: refactor/rename, make it clear this fn is only
// for Markdown files with front matter shared by content plugin
export async function readLastUpdateData(
filePath: string,
options: LastUpdateOptions,
lastUpdateFrontMatter: FrontMatterLastUpdate | undefined,
vcsParam: Pick<VcsConfig, 'getFileLastUpdateInfo'>,
): Promise<LastUpdateData> {
// We fall back to the default VCS config at runtime on purpose
// It preserves retro-compatibility if a third-party plugin imports it
// This also ensures unit tests keep working without extra setup
// We still want to ensure type safety by requiring the VCS param
// TODO Docusaurus v4: refactor all these Git read APIs
const vcs = vcsParam ?? getVcsPreset('default-v1');
const {showLastUpdateAuthor, showLastUpdateTime} = options;
if (!showLastUpdateAuthor && !showLastUpdateTime) {
@ -128,14 +68,16 @@ export async function readLastUpdateData(
// We try to minimize git last update calls
// We call it at most once
// If all the data is provided as front matter, we do not call it
const getLastUpdateMemoized = _.memoize(() => getLastUpdate(filePath));
const getLastUpdateMemoized = _.memoize(() =>
vcs.getFileLastUpdateInfo(filePath),
);
const getLastUpdateBy = () =>
getLastUpdateMemoized().then((update) => {
// Important, see https://github.com/facebook/docusaurus/pull/11211
if (update === null) {
return null;
}
return update?.lastUpdatedBy;
return update?.author;
});
const getLastUpdateAt = () =>
getLastUpdateMemoized().then((update) => {
@ -143,7 +85,7 @@ export async function readLastUpdateData(
if (update === null) {
return null;
}
return update?.lastUpdatedAt;
return update?.timestamp;
});
const lastUpdatedBy = showLastUpdateAuthor

View File

@ -0,0 +1 @@
A site fixture with files versioned in Git.

View File

@ -0,0 +1 @@
Blog 1

View File

@ -0,0 +1,3 @@
This is a partial in a file/folder whose name starts with `_`:
it should be excluded by default.

View File

@ -0,0 +1 @@
Doc with space in name

View File

@ -0,0 +1 @@
Doc 1

View File

@ -0,0 +1 @@
Doc 2

View File

@ -0,0 +1,723 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import fs from 'fs-extra';
import path from 'path';
import os from 'os';
import execa from 'execa';
import {
FileNotTrackedError,
getFileCommitDate,
getGitLastUpdate,
getGitCreation,
getGitRepoRoot,
getGitSuperProjectRoot,
getGitSubmodulePaths,
getGitAllRepoRoots,
getGitRepositoryFilesInfo,
} from '../gitUtils';
/**
 * Test helper encapsulating a throwaway Git repository.
 * All Git commands run through execa with `shell: true` and the repo
 * directory as cwd. Use {@link Git.initializeRepo} to obtain an instance.
 */
class Git {
  // Parameter property: `dir` is declared and assigned by the constructor
  // signature itself (the former explicit `this.dir = dir;` was redundant).
  private constructor(private dir: string) {}

  /**
   * Runs a command and returns the execa result.
   * execa already rejects on non-zero exit codes by default, so the
   * explicit exitCode check below is defensive.
   */
  private static async runOptimisticGitCommand({
    cwd,
    cmd,
    args,
    options,
  }: {
    cwd: string;
    // Optional: the instance-level wrapper may call us without args
    args?: string[];
    cmd: string;
    options?: execa.Options;
  }): Promise<execa.ExecaReturnValue> {
    const res = await execa(cmd, args, {
      cwd,
      // NOTE(review): "silent" is not a documented execa option — confirm
      // whether it is intentional or leftover from another task runner.
      silent: true,
      shell: true,
      ...options,
    });
    if (res.exitCode !== 0) {
      throw new Error(
        `Git command failed with code ${res.exitCode}: ${cmd} ${(
          args ?? []
        ).join(' ')}`,
      );
    }
    return res;
  }

  /** Creates a repo in `dir` with test user config + an initial empty commit. */
  static async initializeRepo(dir: string): Promise<Git> {
    await Git.runOptimisticGitCommand({
      cmd: 'git',
      args: ['init'],
      cwd: dir,
    });
    await Git.runOptimisticGitCommand({
      cmd: 'git',
      args: ['config', 'user.email', '"test@example.com"'],
      cwd: dir,
    });
    await Git.runOptimisticGitCommand({
      cmd: 'git',
      args: ['config', 'user.name', '"Test"'],
      cwd: dir,
    });
    await Git.runOptimisticGitCommand({
      cmd: 'git',
      args: ['commit', '--allow-empty', '-m "First commit"'],
      cwd: dir,
    });
    return new Git(dir);
  }

  /** Runs an arbitrary command in the repo directory, throwing on failure. */
  async runOptimisticGitCommand(
    cmd: string,
    args?: string[],
    options?: execa.Options,
  ): Promise<execa.ExecaReturnValue> {
    return Git.runOptimisticGitCommand({cwd: this.dir, cmd, args, options});
  }

  /** `git add <filePath>` */
  async add(filePath: string): Promise<void> {
    await this.runOptimisticGitCommand('git', ['add', filePath]);
  }

  /** `git add .` */
  async addAll(): Promise<void> {
    await this.runOptimisticGitCommand('git', ['add', '.']);
  }

  /**
   * Commits staged changes with a deterministic author date AND committer
   * date (via GIT_COMMITTER_DATE), so that git log timestamps are stable.
   * `date` is a YYYY-MM-DD string; `author` looks like "Name <email>".
   * Values are wrapped in quotes because the command runs with shell:true.
   */
  async commit(msg: string, date: string, author: string): Promise<void> {
    await this.runOptimisticGitCommand(
      `git`,
      [
        'commit',
        `-m "${msg}"`,
        `--date "${date}T00:00:00Z"`,
        `--author "${author}"`,
      ],
      {env: {GIT_COMMITTER_DATE: `${date}T00:00:00Z`}},
    );
  }

  /** Writes (or overwrites) a file and commits it in a single step. */
  async commitFile(
    filePath: string,
    {
      fileContent,
      commitMessage,
      commitDate,
      commitAuthor,
    }: {
      fileContent?: string;
      commitMessage?: string;
      commitDate?: string;
      commitAuthor?: string;
    } = {},
  ): Promise<void> {
    await fs.ensureDir(path.join(this.dir, path.dirname(filePath)));
    await fs.writeFile(
      path.join(this.dir, filePath),
      fileContent ?? `Content of ${filePath}`,
    );
    await this.add(filePath);
    await this.commit(
      commitMessage ?? `Create ${filePath}`,
      commitDate ?? '2020-06-19',
      commitAuthor ?? 'Seb <seb@example.com>',
    );
  }

  /**
   * `git submodule add <repoPath> <name>`
   * protocol.file.allow=always is presumably needed to add local-path
   * submodules on recent Git versions — confirm.
   */
  async addSubmodule(name: string, repoPath: string): Promise<void> {
    // await (not return): the method is declared Promise<void>, so returning
    // the execa result contradicted the declared return type under strict.
    await this.runOptimisticGitCommand('git', [
      '-c protocol.file.allow=always',
      'submodule',
      'add',
      repoPath,
      name,
    ]);
  }

  /** Adds the given submodules sequentially, then init/update recursively. */
  async defineSubmodules(submodules: {[name: string]: string}): Promise<void> {
    // Sequential on purpose: each "submodule add" mutates .gitmodules/index
    for (const [name, repoPath] of Object.entries(submodules)) {
      await this.addSubmodule(name, repoPath);
    }
    await this.runOptimisticGitCommand('git', [
      'submodule',
      'update',
      '--init',
      '--recursive',
    ]);
  }
}
/**
 * Creates a fresh temporary Git repository (with an initial empty commit)
 * and returns both its directory and a {@link Git} handle to drive it.
 */
async function createGitRepoEmpty(): Promise<{repoDir: string; git: Git}> {
  const tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'git-test-repo'));
  // Resolve symlinks in the temp path (presumably so paths reported by git
  // compare equal to the fixture dir — e.g. symlinked OS temp folders)
  const repoDir = await fs.realpath.native(tmpDir);
  return {repoDir, git: await Git.initializeRepo(repoDir)};
}
describe('commit info APIs', () => {
  /**
   * Builds a repo with a deterministic history:
   * - test.txt: created 2020-06-19 (Caroline), updated 2020-06-20 (Josh-Cena),
   *   updated 2020-09-13 (Robert)
   * - moved.txt: created 2020-09-13 (Robert), renamed to dest.txt on
   *   2020-11-13 (Seb)
   * - untracked.txt: written on disk but never committed
   */
  async function createGitRepoTestFixture() {
    const {repoDir, git} = await createGitRepoEmpty();
    await git.commitFile('test.txt', {
      fileContent: 'Some content',
      commitMessage: 'Create test.txt',
      commitDate: '2020-06-19',
      commitAuthor: 'Caroline <caroline@example.com>',
    });
    await git.commitFile('test.txt', {
      fileContent: 'Updated content',
      commitMessage: 'Update test.txt',
      commitDate: '2020-06-20',
      commitAuthor: 'Josh-Cena <josh-cena@example.com>',
    });
    await fs.writeFile(path.join(repoDir, 'test.txt'), 'Updated content (2)');
    await fs.writeFile(path.join(repoDir, 'moved.txt'), 'This file is moved');
    await git.addAll();
    await git.commit(
      'Update test.txt again, create moved.txt',
      '2020-09-13',
      'Robert <robert@example.com>',
    );
    await fs.move(
      path.join(repoDir, 'moved.txt'),
      path.join(repoDir, 'dest.txt'),
    );
    await git.addAll();
    await git.commit(
      'Rename moved.txt to dest.txt',
      '2020-11-13',
      'Seb <seb@example.com>',
    );
    await fs.writeFile(path.join(repoDir, 'untracked.txt'), "I'm untracked");
    return repoDir;
  }

  // Create the repo only once for all tests => faster tests
  const repoDirPromise = createGitRepoTestFixture();

  describe('getFileCommitDate', () => {
    // Titles fixed: age:'oldest' resolves the EARLIEST (creation) commit and
    // age:'newest' the LATEST one — the two descriptions were swapped.
    it('returns earliest commit date with author', async () => {
      const repoDir = await repoDirPromise;
      await expect(
        getFileCommitDate(path.join(repoDir, 'test.txt'), {
          age: 'oldest',
          includeAuthor: true,
        }),
      ).resolves.toEqual({
        date: new Date('2020-06-19'),
        timestamp: new Date('2020-06-19').getTime(),
        author: 'Caroline',
      });
      // dest.txt creation follows the rename back to moved.txt (--follow)
      await expect(
        getFileCommitDate(path.join(repoDir, 'dest.txt'), {
          age: 'oldest',
          includeAuthor: true,
        }),
      ).resolves.toEqual({
        date: new Date('2020-09-13'),
        timestamp: new Date('2020-09-13').getTime(),
        author: 'Robert',
      });
    });
    it('returns latest commit date with author', async () => {
      const repoDir = await repoDirPromise;
      await expect(
        getFileCommitDate(path.join(repoDir, 'test.txt'), {
          age: 'newest',
          includeAuthor: true,
        }),
      ).resolves.toEqual({
        date: new Date('2020-09-13'),
        timestamp: new Date('2020-09-13').getTime(),
        author: 'Robert',
      });
      await expect(
        getFileCommitDate(path.join(repoDir, 'dest.txt'), {
          age: 'newest',
          includeAuthor: true,
        }),
      ).resolves.toEqual({
        date: new Date('2020-11-13'),
        timestamp: new Date('2020-11-13').getTime(),
        author: 'Seb',
      });
    });
    it('throws custom error when file is not tracked', async () => {
      const repoDir = await repoDirPromise;
      await expect(() =>
        getFileCommitDate(path.join(repoDir, 'untracked.txt'), {
          age: 'newest',
          includeAuthor: true,
        }),
      ).rejects.toThrow(FileNotTrackedError);
    });
    it('throws when file not found', async () => {
      // Reuse the shared fixture: probing a non-existing path is read-only,
      // so there is no need to build a fresh repo for this test.
      const repoDir = await repoDirPromise;
      await expect(() =>
        getFileCommitDate(path.join(repoDir, 'nonexistent.txt'), {
          age: 'newest',
          includeAuthor: true,
        }),
      ).rejects.toThrow(
        /Failed to retrieve git history for ".*nonexistent.txt" because the file does not exist./,
      );
    });
  });

  // Renamed from the duplicated "commit info APIs" label for clearer reports
  describe('getGitCreation/getGitLastUpdate', () => {
    it('returns creation and last update info for test.txt', async () => {
      const repoDir = await repoDirPromise;
      const filePath = path.join(repoDir, 'test.txt');
      await expect(getGitCreation(filePath)).resolves.toEqual({
        author: 'Caroline',
        timestamp: new Date('2020-06-19').getTime(),
      });
      await expect(getGitLastUpdate(filePath)).resolves.toEqual({
        author: 'Robert',
        timestamp: new Date('2020-09-13').getTime(),
      });
    });
    it('returns creation and last update info for dest.txt', async () => {
      const repoDir = await repoDirPromise;
      const filePath = path.join(repoDir, 'dest.txt');
      await expect(getGitCreation(filePath)).resolves.toEqual({
        author: 'Robert',
        timestamp: new Date('2020-09-13').getTime(),
      });
      await expect(getGitLastUpdate(filePath)).resolves.toEqual({
        author: 'Seb',
        timestamp: new Date('2020-11-13').getTime(),
      });
    });
    it('returns null for untracked.txt', async () => {
      const repoDir = await repoDirPromise;
      const filePath = path.join(repoDir, 'untracked.txt');
      await expect(getGitCreation(filePath)).resolves.toEqual(null);
      await expect(getGitLastUpdate(filePath)).resolves.toEqual(null);
    });
    it('rejects for non-existing.txt', async () => {
      const repoDir = await repoDirPromise;
      const filePath = path.join(repoDir, 'non-existing.txt');
      await expect(
        getGitCreation(filePath),
      ).rejects.toThrowErrorMatchingInlineSnapshot(
        `"An error occurred when trying to get the last update date"`,
      );
      await expect(
        getGitLastUpdate(filePath),
      ).rejects.toThrowErrorMatchingInlineSnapshot(
        `"An error occurred when trying to get the last update date"`,
      );
    });
    it('returns files info', async () => {
      const repoDir = await repoDirPromise;
      // NOTE(review): unlike getGitCreation above, dest.txt "creation" here is
      // the rename commit (2020-11-13) — presumably this bulk API does not
      // follow renames. Confirm this is the intended semantics.
      await expect(getGitRepositoryFilesInfo(repoDir)).resolves
        .toMatchInlineSnapshot(`
        Map {
          "dest.txt" => {
            "creation": {
              "author": "Seb",
              "timestamp": 1605225600000,
            },
            "lastUpdate": {
              "author": "Seb",
              "timestamp": 1605225600000,
            },
          },
          "moved.txt" => {
            "creation": {
              "author": "Robert",
              "timestamp": 1599955200000,
            },
            "lastUpdate": {
              "author": "Robert",
              "timestamp": 1599955200000,
            },
          },
          "test.txt" => {
            "creation": {
              "author": "Caroline",
              "timestamp": 1592524800000,
            },
            "lastUpdate": {
              "author": "Robert",
              "timestamp": 1599955200000,
            },
          },
        }
      `);
    });
  });
});
describe('getGitRepoRoot', () => {
  // Minimal fixture: one repo containing a committed file in a subdirectory
  async function initTestRepo() {
    const {repoDir, git} = await createGitRepoEmpty();
    await git.commitFile('subDir/test.txt');
    return repoDir;
  }
  // Create the repo only once for all tests => faster tests
  const repoDirPromise = initTestRepo();
  it('returns repoDir for cwd=repoDir', async () => {
    const repoDir = await repoDirPromise;
    const cwd = repoDir;
    await expect(getGitRepoRoot(cwd)).resolves.toEqual(repoDir);
  });
  it('returns repoDir for cwd=repoDir/subDir', async () => {
    const repoDir = await repoDirPromise;
    const cwd = path.join(repoDir, 'subDir');
    await expect(getGitRepoRoot(cwd)).resolves.toEqual(repoDir);
  });
  // NOTE(review): relies on this test file living inside a checkout whose
  // root directory is named "docusaurus" — confirm acceptable for all CI envs
  it('returns Docusaurus repo for cwd=__dirname', async () => {
    const cwd = __dirname;
    await expect(getGitRepoRoot(cwd)).resolves.toMatch(/docusaurus$/);
  });
  it('rejects for cwd=repoDir/doesNotExist', async () => {
    const repoDir = await repoDirPromise;
    const cwd = path.join(repoDir, 'doesNotExist');
    await expect(getGitRepoRoot(cwd)).rejects.toThrow(
      /Couldn't find the git repository root directory/,
    );
  });
});
describe('submodules APIs', () => {
async function initTestRepo() {
const superproject = await createGitRepoEmpty();
await superproject.git.commitFile('README.md');
await superproject.git.commitFile('website/docs/myDoc.md');
const submodule1 = await createGitRepoEmpty();
await submodule1.git.commitFile('file1.txt');
const submodule2 = await createGitRepoEmpty();
await submodule2.git.commitFile('subDir/file2.txt');
await superproject.git.defineSubmodules({
'submodules/submodule1': submodule1.repoDir,
'submodules/submodule2': submodule2.repoDir,
});
return {superproject, submodule1, submodule2};
}
// Create the repo only once for all tests => faster tests
const repoPromise = initTestRepo();
describe('getGitSuperProjectRoot', () => {
it('returns superproject dir for cwd=superproject', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir);
await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
repo.superproject.repoDir,
);
});
it('returns superproject dir for cwd=superproject/submodules', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'submodules');
await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
repo.superproject.repoDir,
);
});
it('returns superproject dir for cwd=superproject/website/docs', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'website/docs');
await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
repo.superproject.repoDir,
);
});
it('returns superproject dir for cwd=submodule1', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'submodules/submodule1');
await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
repo.superproject.repoDir,
);
});
it('returns superproject dir for cwd=submodule2', async () => {
const repo = await initTestRepo();
const cwd = path.join(repo.superproject.repoDir, 'submodules/submodule2');
await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
repo.superproject.repoDir,
);
});
it('returns superproject dir for cwd=submodule2/subDir', async () => {
const repo = await repoPromise;
const cwd = path.join(
repo.superproject.repoDir,
'submodules/submodule2/subDir',
);
await expect(getGitSuperProjectRoot(cwd)).resolves.toEqual(
repo.superproject.repoDir,
);
});
it('rejects for cwd of untracked dir', async () => {
const cwd = await os.tmpdir();
// Do we really want this to throw?
// Not sure, and Git doesn't help us failsafe and return null...
await expect(getGitSuperProjectRoot(cwd)).rejects
.toThrowErrorMatchingInlineSnapshot(`
"Couldn't find the git superproject root directory
Failure while running \`git rev-parse --show-superproject-working-tree\` from cwd="<TEMP_DIR>"
The command executed throws an error: Command failed with exit code 128: git rev-parse --show-superproject-working-tree
fatal: not a git repository (or any of the parent directories): .git"
`);
});
});
describe('getGitSubmodulePaths', () => {
it('returns submodules for cwd=superproject', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir);
await expect(getGitSubmodulePaths(cwd)).resolves.toEqual([
'submodules/submodule1',
'submodules/submodule2',
]);
});
it('returns submodules for cwd=superproject/website/docs', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'website', 'docs');
await expect(getGitSubmodulePaths(cwd)).resolves.toEqual([
// The returned paths are relative to CWD,
// Not sure if it's the best behavior.
// But you'd rather call this with the superproject root as CWD anyway!
'../../submodules/submodule1',
'../../submodules/submodule2',
]);
});
it('returns [] for cwd=submodules/submodule1', async () => {
const repo = await repoPromise;
const cwd = path.join(
repo.superproject.repoDir,
'submodules',
'submodule1',
);
await expect(getGitSubmodulePaths(cwd)).resolves.toEqual([]);
});
it('returns [] for cwd=submodules/submodule2/subDir', async () => {
const repo = await repoPromise;
const cwd = path.join(
repo.superproject.repoDir,
'submodules',
'submodule2',
'subDir',
);
await expect(getGitSubmodulePaths(cwd)).resolves.toEqual([]);
});
it('rejects for cwd=doesNotExist', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'doesNotExist');
await expect(getGitSubmodulePaths(cwd)).rejects.toThrow(
/Couldn't read the list of git submodules/,
);
});
it('rejects for cwd=notTracked', async () => {
const cwd = await os.tmpdir();
await expect(getGitSubmodulePaths(cwd)).rejects.toThrow(
/Couldn't read the list of git submodules/,
);
});
});
describe('getGitAllRepoRoots', () => {
it('returns root paths for cwd=superproject', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir);
await expect(getGitAllRepoRoots(cwd)).resolves.toEqual([
repo.superproject.repoDir,
path.join(repo.superproject.repoDir, 'submodules', 'submodule1'),
path.join(repo.superproject.repoDir, 'submodules', 'submodule2'),
]);
});
it('returns root paths for cwd=superproject/website/docs', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'website', 'docs');
await expect(getGitAllRepoRoots(cwd)).resolves.toEqual([
repo.superproject.repoDir,
path.join(repo.superproject.repoDir, 'submodules', 'submodule1'),
path.join(repo.superproject.repoDir, 'submodules', 'submodule2'),
]);
});
it('returns root paths for cwd=superproject/submodules', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'submodules');
await expect(getGitAllRepoRoots(cwd)).resolves.toEqual([
repo.superproject.repoDir,
path.join(repo.superproject.repoDir, 'submodules', 'submodule1'),
path.join(repo.superproject.repoDir, 'submodules', 'submodule2'),
]);
});
it('returns root paths for cwd=superproject/submodules/submodule1', async () => {
const repo = await repoPromise;
const cwd = path.join(
repo.superproject.repoDir,
'submodules',
'submodule1',
);
await expect(getGitAllRepoRoots(cwd)).resolves.toEqual([
repo.superproject.repoDir,
path.join(repo.superproject.repoDir, 'submodules', 'submodule1'),
path.join(repo.superproject.repoDir, 'submodules', 'submodule2'),
]);
});
it('returns root paths for cwd=superproject/submodules/submodule2/subDir', async () => {
const repo = await repoPromise;
const cwd = path.join(
repo.superproject.repoDir,
'submodules',
'submodule2',
'subDir',
);
await expect(getGitAllRepoRoots(cwd)).resolves.toEqual([
repo.superproject.repoDir,
path.join(repo.superproject.repoDir, 'submodules', 'submodule1'),
path.join(repo.superproject.repoDir, 'submodules', 'submodule2'),
]);
});
it('rejects for cwd=doesNotExist', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir, 'doesNotExist');
await expect(getGitAllRepoRoots(cwd)).rejects.toThrow(
/Could not get all the git repository root paths/,
);
});
it('rejects for cwd=notTracked', async () => {
const cwd = await os.tmpdir();
await expect(getGitAllRepoRoots(cwd)).rejects.toThrow(
/Could not get all the git repository root paths/,
);
});
});
describe('getGitRepositoryFilesInfo', () => {
it('for superproject', async () => {
const repo = await repoPromise;
const cwd = path.join(repo.superproject.repoDir);
await expect(getGitRepositoryFilesInfo(cwd)).resolves
.toMatchInlineSnapshot(`
Map {
"website/docs/myDoc.md" => {
"creation": {
"author": "Seb",
"timestamp": 1592524800000,
},
"lastUpdate": {
"author": "Seb",
"timestamp": 1592524800000,
},
},
"README.md" => {
"creation": {
"author": "Seb",
"timestamp": 1592524800000,
},
"lastUpdate": {
"author": "Seb",
"timestamp": 1592524800000,
},
},
}
`);
});
it('for submodule1', async () => {
const repo = await repoPromise;
const cwd = path.join(
repo.superproject.repoDir,
'submodules',
'submodule1',
);
await expect(getGitRepositoryFilesInfo(cwd)).resolves
.toMatchInlineSnapshot(`
Map {
"file1.txt" => {
"creation": {
"author": "Seb",
"timestamp": 1592524800000,
},
"lastUpdate": {
"author": "Seb",
"timestamp": 1592524800000,
},
},
}
`);
});
it('for submodule2', async () => {
const repo = await repoPromise;
const cwd = path.join(
repo.superproject.repoDir,
'submodules',
'submodule2',
);
await expect(getGitRepositoryFilesInfo(cwd)).resolves
.toMatchInlineSnapshot(`
Map {
"subDir/file2.txt" => {
"creation": {
"author": "Seb",
"timestamp": 1592524800000,
},
"lastUpdate": {
"author": "Seb",
"timestamp": 1592524800000,
},
},
}
`);
});
});
});

View File

@ -0,0 +1,524 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import path from 'path';
import fs from 'fs-extra';
import os from 'os';
import _ from 'lodash';
import execa from 'execa';
import PQueue from 'p-queue';
import logger from '@docusaurus/logger';
// Quite high/conservative concurrency value (it was previously "Infinity")
// See https://github.com/facebook/docusaurus/pull/10915
const DefaultGitCommandConcurrency =
  // TODO Docusaurus v4: bump node, availableParallelism() now always exists
  (typeof os.availableParallelism === 'function'
    ? os.availableParallelism()
    : os.cpus().length) * 4;

// Optional user override through the DOCUSAURUS_GIT_COMMAND_CONCURRENCY env
// variable. An unparsable value yields NaN, which the falsy check below
// rejects, falling back to the default.
const GitCommandConcurrencyEnv = process.env.DOCUSAURUS_GIT_COMMAND_CONCURRENCY
  ? parseInt(process.env.DOCUSAURUS_GIT_COMMAND_CONCURRENCY, 10)
  : undefined;

// Effective concurrency: the env override wins only when it is a positive
// number; otherwise we use the CPU-based default above.
const GitCommandConcurrency =
  GitCommandConcurrencyEnv && GitCommandConcurrencyEnv > 0
    ? GitCommandConcurrencyEnv
    : DefaultGitCommandConcurrency;

// We use a queue to avoid running too many concurrent Git commands at once
// See https://github.com/facebook/docusaurus/issues/10348
const GitCommandQueue = new PQueue({
  concurrency: GitCommandConcurrency,
});
/**
 * Checks whether a usable `git` binary is reachable on the PATH by running
 * `git --version` synchronously. Any spawn failure counts as "no git".
 */
const realHasGitFn = () => {
  try {
    const {exitCode} = execa.sync('git', ['--version']);
    return exitCode === 0;
  } catch {
    return false;
  }
};

// The check is synchronous IO, so we memoize it in normal runs: we assume
// the user won't install Git in the middle of a build. Tests bypass the
// cache so each test sees a fresh probe.
const hasGit = (() => {
  if (process.env.NODE_ENV === 'test') {
    return realHasGitFn;
  }
  return _.memoize(realHasGitFn);
})();
// TODO Docusaurus v4: remove this
// Exceptions are not made for control flow logic
// (getGitCommitInfo() in this file catches it to emit a one-time warning)
/** Custom error thrown when git is not found in `PATH`. */
export class GitNotFoundError extends Error {}
// TODO Docusaurus v4: remove this, only kept for retro-compatibility
// Exceptions are not made for control flow logic
// (getGitCommitInfo() in this file catches it to emit a one-time warning)
/** Custom error thrown when the current file is not tracked by git. */
export class FileNotTrackedError extends Error {}
/**
 * Fetches the git history of a file and returns a relevant commit date.
 * It gets the commit date instead of author date so that amended commits
 * can have their dates updated.
 *
 * @throws {@link GitNotFoundError} If git is not found in `PATH`.
 * @throws {@link FileNotTrackedError} If the current file is not tracked by git.
 * @throws Also throws when `git log` exited with non-zero, or when it outputs
 * unexpected text.
 */
export async function getFileCommitDate(
  /** Absolute path to the file. */
  file: string,
  args: {
    /**
     * `"oldest"` is the commit that added the file, following renames;
     * `"newest"` is the last commit that edited the file.
     */
    age?: 'oldest' | 'newest';
    /** Use `includeAuthor: true` to get the author information as well. */
    includeAuthor?: false;
  },
): Promise<{
  /** Relevant commit date. */
  date: Date; // TODO duplicate data, not really useful?
  /** Timestamp returned from git, converted to **milliseconds**. */
  timestamp: number;
}>;
/**
 * Fetches the git history of a file and returns a relevant commit date.
 * It gets the commit date instead of author date so that amended commits
 * can have their dates updated.
 *
 * @throws {@link GitNotFoundError} If git is not found in `PATH`.
 * @throws {@link FileNotTrackedError} If the current file is not tracked by git.
 * @throws Also throws when `git log` exited with non-zero, or when it outputs
 * unexpected text.
 */
export async function getFileCommitDate(
  /** Absolute path to the file. */
  file: string,
  args: {
    /**
     * `"oldest"` is the commit that added the file, following renames;
     * `"newest"` is the last commit that edited the file.
     */
    age?: 'oldest' | 'newest';
    includeAuthor: true;
  },
): Promise<{
  /** Relevant commit date. */
  date: Date;
  /** Timestamp returned from git, converted to **milliseconds**. */
  timestamp: number;
  /** The author's name, as returned from git. */
  author: string;
}>;
// Implementation signature — not directly visible to overload callers.
export async function getFileCommitDate(
  file: string,
  {
    age = 'oldest',
    includeAuthor = false,
  }: {
    age?: 'oldest' | 'newest';
    includeAuthor?: boolean;
  },
): Promise<{
  date: Date;
  timestamp: number;
  author?: string;
}> {
  // Fail fast with typed errors so callers can degrade gracefully
  // (see getGitCommitInfo below, which catches GitNotFoundError).
  if (!hasGit()) {
    throw new GitNotFoundError(
      `Failed to retrieve git history for "${file}" because git is not installed.`,
    );
  }
  if (!(await fs.pathExists(file))) {
    throw new Error(
      `Failed to retrieve git history for "${file}" because the file does not exist.`,
    );
  }
  // We add a "RESULT:" prefix to make parsing easier
  // See why: https://github.com/facebook/docusaurus/pull/10022
  const resultFormat = includeAuthor ? 'RESULT:%ct,%an' : 'RESULT:%ct';
  const args = [
    `--format=${resultFormat}`,
    '--max-count=1',
    age === 'oldest' ? '--follow --diff-filter=A' : undefined,
  ]
    .filter(Boolean)
    .join(' ');
  // Do not include GPG signature in the log output
  // See https://github.com/facebook/docusaurus/pull/10022
  // The command runs from the file's directory with a bare basename
  // (see cwd below) — presumably to make --follow track renames correctly.
  const command = `git -c log.showSignature=false log ${args} -- "${path.basename(
    file,
  )}"`;
  // Queued through GitCommandQueue to cap concurrent git processes.
  const result = (await GitCommandQueue.add(() => {
    return execa(command, {
      cwd: path.dirname(file),
      shell: true,
    });
  }))!;
  // NOTE(review): execa rejects on non-zero exit codes by default, so this
  // branch looks unreachable — confirm before relying on it.
  if (result.exitCode !== 0) {
    throw new Error(
      `Failed to retrieve the git history for file "${file}" with exit code ${result.exitCode}: ${result.stderr}`,
    );
  }
  // We only parse the output line starting with our "RESULT:" prefix
  // See why https://github.com/facebook/docusaurus/pull/10022
  const regex = includeAuthor
    ? /(?:^|\n)RESULT:(?<timestamp>\d+),(?<author>.+)(?:$|\n)/
    : /(?:^|\n)RESULT:(?<timestamp>\d+)(?:$|\n)/;
  const output = result.stdout.trim();
  // Empty output means git log found no commit for this path: untracked.
  if (!output) {
    throw new FileNotTrackedError(
      `Failed to retrieve the git history for file "${file}" because the file is not tracked by git.`,
    );
  }
  const match = output.match(regex);
  if (!match) {
    throw new Error(
      `Failed to retrieve the git history for file "${file}" with unexpected output: ${output}`,
    );
  }
  // git emits seconds since epoch (%ct); convert to milliseconds.
  const timestampInSeconds = Number(match.groups!.timestamp);
  const timestamp = timestampInSeconds * 1_000;
  const date = new Date(timestamp);
  if (includeAuthor) {
    return {date, timestamp, author: match.groups!.author!};
  }
  return {date, timestamp};
}
// One-shot warning guards: each warning is printed at most once per process.
let showedGitRequirementError = false;
let showedFileNotTrackedError = false;

type GitCommitInfo = {timestamp: number; author: string};

/**
 * Reads a single commit (oldest or newest) for a file via getFileCommitDate(),
 * translating the "expected" failure modes (no git binary, untracked file)
 * into a one-time warning plus a null result. Unexpected failures rethrow.
 */
async function getGitCommitInfo(
  filePath: string,
  age: 'oldest' | 'newest',
): Promise<GitCommitInfo | null> {
  if (!filePath) {
    return null;
  }
  // Wrap in try/catch in case the shell commands fail
  // (e.g. project doesn't use Git, etc).
  try {
    const {timestamp, author} = await getFileCommitDate(filePath, {
      age,
      includeAuthor: true,
    });
    return {timestamp, author};
  } catch (err) {
    // TODO legacy perf issue: do not use exceptions for control flow!
    if (err instanceof GitNotFoundError) {
      if (!showedGitRequirementError) {
        logger.warn('Sorry, the last update options require Git.');
        showedGitRequirementError = true;
      }
      return null;
    }
    if (err instanceof FileNotTrackedError) {
      if (!showedFileNotTrackedError) {
        logger.warn(
          'Cannot infer the update date for some files, as they are not tracked by git.',
        );
        showedFileNotTrackedError = true;
      }
      return null;
    }
    throw new Error(`An error occurred when trying to get the last update date`, {
      cause: err,
    });
  }
}
/**
 * Returns the last-update (newest commit) info for a file, or null when it
 * cannot be determined (empty path, no git, file untracked).
 */
export async function getGitLastUpdate(
  filePath: string,
): Promise<GitCommitInfo | null> {
  const lastUpdate = await getGitCommitInfo(filePath, 'newest');
  return lastUpdate;
}
/**
 * Returns the creation (oldest commit) info for a file, or null when it
 * cannot be determined (empty path, no git, file untracked).
 */
export async function getGitCreation(
  filePath: string,
): Promise<GitCommitInfo | null> {
  const creation = await getGitCommitInfo(filePath, 'oldest');
  return creation;
}
/**
 * Resolves the root directory of the Git repository containing `cwd`, using
 * `git rev-parse --show-toplevel`. The result goes through
 * fs.realpath.native() so symlinked checkouts resolve consistently.
 * Throws a descriptive error if the command cannot run or exits non-zero.
 */
export async function getGitRepoRoot(cwd: string): Promise<string> {
  const baseErrorMessage = () =>
    `Couldn't find the git repository root directory
Failure while running ${logger.code(
      'git rev-parse --show-toplevel',
    )} from cwd=${logger.path(cwd)}`;
  let result;
  try {
    result = await execa('git', ['rev-parse', '--show-toplevel'], {cwd});
  } catch (error) {
    // We enter this branch when cwd is not a dir for example
    throw new Error(
      `${baseErrorMessage()}
The command executed throws an error: ${(error as Error).message}`,
      {cause: error},
    );
  }
  if (result.exitCode !== 0) {
    throw new Error(
      `${baseErrorMessage()}
The command returned exit code ${logger.code(result.exitCode)}: ${logger.subdue(
        result.stderr,
      )}`,
    );
  }
  return fs.realpath.native(result.stdout.trim());
}
// A Git "superproject" is a Git repository that contains submodules
// See https://git-scm.com/docs/git-rev-parse#Documentation/git-rev-parse.txt---show-superproject-working-tree
// See https://git-scm.com/book/en/v2/Git-Tools-Submodules
/**
 * Resolves the working-tree root of the superproject containing `cwd`.
 * When `cwd` is not inside a submodule, the command prints nothing, and we
 * fall back to the regular repository root.
 */
export async function getGitSuperProjectRoot(
  cwd: string,
): Promise<string | null> {
  const baseErrorMessage = () =>
    `Couldn't find the git superproject root directory
Failure while running ${logger.code(
      'git rev-parse --show-superproject-working-tree',
    )} from cwd=${logger.path(cwd)}`;
  let result;
  try {
    result = await execa(
      'git',
      ['rev-parse', '--show-superproject-working-tree'],
      {cwd},
    );
  } catch (error) {
    // We enter this branch when cwd is not a dir for example
    throw new Error(
      `${baseErrorMessage()}
The command executed throws an error: ${(error as Error).message}`,
      {cause: error},
    );
  }
  if (result.exitCode !== 0) {
    throw new Error(
      `${baseErrorMessage()}
The command returned exit code ${logger.code(result.exitCode)}: ${logger.subdue(
        result.stderr,
      )}`,
    );
  }
  const superProjectPath = result.stdout.trim();
  // this command only works when inside submodules
  // otherwise it doesn't return anything when we are inside the main repo
  if (superProjectPath) {
    return fs.realpath.native(superProjectPath);
  }
  return getGitRepoRoot(cwd);
}
// See https://git-scm.com/book/en/v2/Git-Tools-Submodules
/**
 * Lists the submodule paths (relative to the repo root) of the repository
 * at `cwd`, by parsing `git submodule status` output.
 * Returns an empty array when the repo has no submodules.
 *
 * Fix: the line parser was a needless `async` function fed through
 * `Promise.all` even though parsing is purely synchronous; it is now a plain
 * synchronous map (same results, same thrown messages).
 */
export async function getGitSubmodulePaths(cwd: string): Promise<string[]> {
  const createErrorMessageBase = () => {
    return `Couldn't read the list of git submodules
Failure while running ${logger.code(
      'git submodule status',
    )} from cwd=${logger.path(cwd)}`;
  };
  const result = await execa('git', ['submodule', 'status'], {
    cwd,
  }).catch((error) => {
    // We enter this rejection when cwd is not a dir for example
    throw new Error(
      `${createErrorMessageBase()}
The command executed throws an error: ${error.message}`,
      {cause: error},
    );
  });
  if (result.exitCode !== 0) {
    throw new Error(
      `${createErrorMessageBase()}
The command returned exit code ${logger.code(result.exitCode)}: ${logger.subdue(
        result.stderr,
      )}`,
    );
  }
  const output = result.stdout.trim();
  if (!output) {
    return [];
  }
  /* The output may contain a space/-/+/U prefix, for example
 1234567e3e35d1f5b submodules/foo (heads/main)
-9ab1f1d3a2d77b0a4 submodules/bar (heads/dev)
+f00ba42e1b3ddead submodules/baz (remotes/origin/main)
Udeadbeefcafe1234 submodules/qux
  */
  const getSubmodulePath = (line: string): string => {
    // Drop the 1-char status prefix, then take the 2nd space-separated field
    const submodulePath = line.substring(1).split(' ')[1];
    if (!submodulePath) {
      throw new Error(`Failed to parse git submodule line: ${line}`);
    }
    return submodulePath;
  };
  return output.split('\n').map(getSubmodulePath);
}
// Find the root git repository alongside all its submodules, if any
export async function getGitAllRepoRoots(cwd: string): Promise<string[]> {
try {
const superProjectRoot = await getGitSuperProjectRoot(cwd);
if (!superProjectRoot) {
return [];
}
let submodulePaths = await getGitSubmodulePaths(superProjectRoot);
submodulePaths = await Promise.all(
submodulePaths.map((submodulePath) =>
fs.realpath.native(path.resolve(superProjectRoot, submodulePath)),
),
);
return [superProjectRoot, ...submodulePaths];
} catch (error) {
throw new Error(
`Could not get all the git repository root paths (superproject + submodules) from cwd=${cwd}`,
{cause: error},
);
}
}
// Useful information about a file tracked in a Git repository
export type GitFileInfo = {
  // Info of the oldest commit touching the file (see getGitCreation)
  creation: GitCommitInfo;
  // Info of the newest commit touching the file (see getGitLastUpdate)
  lastUpdate: GitCommitInfo;
};
// A map of all the files tracked in a Git repository
// Keys are file paths as emitted by `git log` (relative to the repo root,
// until callers resolve them — see vcsGitEager's resolveFileInfoMapPaths)
export type GitFileInfoMap = Map<string, GitFileInfo>;
// Logic inspired from Astro Starlight:
// See https://bsky.app/profile/bluwy.me/post/3lyihod6qos2a
// See https://github.com/withastro/starlight/blob/c417f1efd463be63b7230617d72b120caed098cd/packages/starlight/utils/git.ts#L58
/**
 * Eagerly reads the whole Git history of the repository at `cwd` with a
 * single `git log --name-status` invocation, and returns a map of
 * repo-relative file path => creation/last-update commit info.
 *
 * Fix: commit header lines were parsed with `logLine.split(',')`, which
 * truncates author names that themselves contain a comma (git's `%an` is
 * arbitrary text, e.g. "Doe, John"). Headers are now matched with an
 * anchored regex, and header lines no longer fall through to the
 * file-status parsing below (an author name containing a tab previously
 * could be mistaken for a file entry).
 */
export async function getGitRepositoryFilesInfo(
  cwd: string,
): Promise<GitFileInfoMap> {
  // git --no-pager -c log.showSignature=false log --format=t:%ct,a:%an --name-status
  const result = await execa(
    'git',
    [
      '--no-pager',
      // Do not include GPG signature in the log output
      // See https://github.com/facebook/docusaurus/pull/10022
      '-c',
      'log.showSignature=false',
      // The git command we want to run
      'log',
      // Format each history entry as t:<seconds since epoch>,a:<author name>
      '--format=t:%ct,a:%an',
      // In each entry include the name and status for each modified file
      '--name-status',
      // For creation info, should we use --follow --find-renames=100% ???
    ],
    {
      cwd,
      encoding: 'utf-8',
      // TODO use streaming to avoid a large buffer
      // See https://github.com/withastro/starlight/issues/3154
      maxBuffer: 20 * 1024 * 1024,
    },
  );
  if (result.exitCode !== 0) {
    throw new Error(
      `Docusaurus failed to run the 'git log' to retrieve tracked files last update date/author.
The command exited with code ${result.exitCode}: ${result.stderr}`,
    );
  }
  const logLines = result.stdout.split('\n');
  const now = Date.now();
  // TODO not fail-fast
  // Commit header: t:<timestamp>,a:<author name>. Anchored + hoisted out of
  // the loop; `a:` is matched literally so commas in the author are kept.
  const commitHeaderRegex = /^t:(?<timestamp>\d+),a:(?<author>.*)$/;
  let runningDate = now;
  let runningAuthor = 'N/A';
  const runningMap: GitFileInfoMap = new Map();
  for (const logLine of logLines) {
    const headerMatch = logLine.match(commitHeaderRegex);
    if (headerMatch) {
      // git emits seconds since epoch; convert to milliseconds
      runningDate = Number.parseInt(headerMatch.groups!.timestamp!, 10) * 1000;
      runningAuthor = headerMatch.groups!.author!;
      continue;
    }
    // TODO the code below doesn't handle delete/move/rename operations properly
    // it returns files that no longer exist in the repo (deleted/moved)
    // - Added files take the format `A\t<file>`
    // - Modified files take the format `M\t<file>`
    // - Deleted files take the format `D\t<file>`
    // - Renamed files take the format `R<count>\t<old>\t<new>`
    // - Copied files take the format `C<count>\t<old>\t<new>`
    // The name of the file as of the commit being processed is always
    // the last part of the log line.
    const tabSplit = logLine.lastIndexOf('\t');
    if (tabSplit === -1) {
      continue;
    }
    const relativeFile = logLine.slice(tabSplit + 1);
    const currentFileInfo = runningMap.get(relativeFile);
    // Creation = the min timestamp ever seen for this file
    const currentCreationTime = currentFileInfo?.creation.timestamp || now;
    const newCreationTime = Math.min(currentCreationTime, runningDate);
    const newCreation: GitCommitInfo =
      !currentFileInfo || newCreationTime !== currentCreationTime
        ? {timestamp: newCreationTime, author: runningAuthor}
        : currentFileInfo.creation;
    // Last update = the max timestamp ever seen for this file
    const currentLastUpdateTime = currentFileInfo?.lastUpdate.timestamp || 0;
    const newLastUpdateTime = Math.max(currentLastUpdateTime, runningDate);
    const newLastUpdate: GitCommitInfo =
      !currentFileInfo || newLastUpdateTime !== currentLastUpdateTime
        ? {timestamp: newLastUpdateTime, author: runningAuthor}
        : currentFileInfo.lastUpdate;
    runningMap.set(relativeFile, {
      creation: newCreation,
      lastUpdate: newLastUpdate,
    });
  }
  return runningMap;
}

View File

@ -0,0 +1,54 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {
VCS_HARDCODED_CREATION_INFO,
VCS_HARDCODED_LAST_UPDATE_INFO,
VCS_HARDCODED_UNTRACKED_FILE_PATH,
VcsHardcoded,
} from './vcsHardcoded';
import {VcsGitAdHoc} from './vcsGitAdHoc';
import {VscGitEager} from './vcsGitEager';
import {VcsDisabled} from './vcsDisabled';
import {VcsDefaultV1} from './vcsDefaultV1';
import {VcsDefaultV2} from './vcsDefaultV2';
import type {VcsConfig, VcsPreset} from '@docusaurus/types';
// Registry of every built-in VCS implementation, keyed by preset name.
const VcsPresets: Record<VcsPreset, VcsConfig> = {
  'git-ad-hoc': VcsGitAdHoc,
  // NOTE(review): the imported identifier is spelled "VscGitEager" — looks
  // like a typo for "VcsGitEager"; confirm before any rename (importers).
  'git-eager': VscGitEager,
  hardcoded: VcsHardcoded,
  disabled: VcsDisabled,
  'default-v1': VcsDefaultV1,
  'default-v2': VcsDefaultV2,
};
// All valid preset names, derived from the registry keys above.
export const VcsPresetNames = Object.keys(VcsPresets) as VcsPreset[];
export function findVcsPreset(presetName: string): VcsConfig | undefined {
return VcsPresets[presetName as VcsPreset];
}
/**
 * Looks up a built-in VCS preset by name.
 * @throws when the preset name does not match any known preset.
 */
export function getVcsPreset(presetName: VcsPreset): VcsConfig {
  const vcs = findVcsPreset(presetName);
  if (vcs) {
    return vcs;
  }
  // Fix: the message previously interpolated process.env.DOCUSAURUS_VCS,
  // which is unrelated to the presetName parameter actually being resolved
  throw new Error(`Unknown Docusaurus VCS preset name: ${presetName}`);
}
// Convenient export for writing unit tests depending on VCS
// Bundles the hardcoded VCS implementation together with the constant
// values it returns, so tests can both use it and assert against them.
export const TEST_VCS = {
  CREATION_INFO: VCS_HARDCODED_CREATION_INFO,
  LAST_UPDATE_INFO: VCS_HARDCODED_LAST_UPDATE_INFO,
  UNTRACKED_FILE_PATH: VCS_HARDCODED_UNTRACKED_FILE_PATH,
  ...VcsHardcoded,
};

View File

@ -0,0 +1,33 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {VcsHardcoded} from './vcsHardcoded';
import {VcsGitAdHoc} from './vcsGitAdHoc';
import type {VcsConfig} from '@docusaurus/types';
/**
 * Picks the underlying strategy at call time (not module-load time), so
 * NODE_ENV is honored per call: hardcoded values in dev/test, real ad-hoc
 * git commands otherwise.
 */
function getDynamicStrategy(): VcsConfig {
  const env = process.env.NODE_ENV;
  if (env === 'development' || env === 'test') {
    return VcsHardcoded;
  }
  return VcsGitAdHoc;
}
/**
* This VCS implements the historical Git automatic strategy.
* It is only enabled in production mode, using ad-hoc git log commands.
*/
/**
 * This VCS implements the historical Git automatic strategy.
 * It is only enabled in production mode, using ad-hoc git log commands.
 * Every method delegates lazily through getDynamicStrategy() so the
 * env-based choice happens per call.
 */
export const VcsDefaultV1: VcsConfig = {
  initialize: (...args) => getDynamicStrategy().initialize(...args),
  getFileCreationInfo: (...args) =>
    getDynamicStrategy().getFileCreationInfo(...args),
  getFileLastUpdateInfo: (...args) =>
    getDynamicStrategy().getFileLastUpdateInfo(...args),
};

View File

@ -0,0 +1,33 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {VcsHardcoded} from './vcsHardcoded';
import {VscGitEager} from './vcsGitEager';
import type {VcsConfig} from '@docusaurus/types';
/**
 * Picks the underlying strategy at call time (not module-load time), so
 * NODE_ENV is honored per call: hardcoded values in dev/test, the eager
 * Git reader otherwise.
 */
function getStrategy(): VcsConfig {
  const env = process.env.NODE_ENV;
  if (env === 'development' || env === 'test') {
    return VcsHardcoded;
  }
  return VscGitEager;
}
/**
* This VCS implements the new eager Git automatic strategy.
* It is only enabled in production mode, reading the git repository eagerly.
*/
/**
 * This VCS implements the new eager Git automatic strategy.
 * It is only enabled in production mode, reading the git repository eagerly.
 * Every method delegates lazily through getStrategy() so the env-based
 * choice happens per call.
 */
export const VcsDefaultV2: VcsConfig = {
  initialize: (...args) => getStrategy().initialize(...args),
  getFileCreationInfo: (...args) => getStrategy().getFileCreationInfo(...args),
  getFileLastUpdateInfo: (...args) =>
    getStrategy().getFileLastUpdateInfo(...args),
};

View File

@ -0,0 +1,25 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import type {VcsConfig} from '@docusaurus/types';
/**
* This VCS implementation always returns null values
*/
/**
 * This VCS implementation always returns null values.
 * Useful to disable VCS lookups entirely.
 */
export const VcsDisabled: VcsConfig = {
  initialize: () => {
    // Noop
  },
  getFileCreationInfo: async (_filePath) => {
    return null;
  },
  // Fix: parameter name typo "_ilePath" -> "_filePath" (consistency with
  // the sibling method; the parameter is intentionally unused)
  getFileLastUpdateInfo: async (_filePath) => {
    return null;
  },
};

View File

@ -0,0 +1,30 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {getGitLastUpdate, getGitCreation} from './gitUtils';
import type {VcsConfig} from '@docusaurus/types';
/**
* A VCS strategy to query Git information in an ad-hoc way.
* This is the default/historical Docusaurus Git VCS implementation.
* Unfortunately, it is a major bottleneck for large sites/repositories.
*
* See also https://github.com/facebook/docusaurus/issues/11208
*/
/**
 * A VCS strategy to query Git information in an ad-hoc way.
 * This is the default/historical Docusaurus Git VCS implementation.
 * Unfortunately, it is a major bottleneck for large sites/repositories.
 *
 * See also https://github.com/facebook/docusaurus/issues/11208
 */
export const VcsGitAdHoc: VcsConfig = {
  initialize: () => {
    // Nothing to do here for the default/historical Git implementation
  },
  // Both lookups delegate straight to the per-file `git log` helpers
  getFileCreationInfo: (filePath: string) => getGitCreation(filePath),
  getFileLastUpdateInfo: (filePath: string) => getGitLastUpdate(filePath),
};

View File

@ -0,0 +1,99 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {resolve, basename} from 'node:path';
import logger, {PerfLogger} from '@docusaurus/logger';
import {getGitAllRepoRoots, getGitRepositoryFilesInfo} from './gitUtils';
import type {GitFileInfo, GitFileInfoMap} from './gitUtils';
import type {VcsConfig} from '@docusaurus/types';
// The Map keys should be absolute file paths, not relative Git paths
// The Map keys should be absolute file paths, not relative Git paths
function resolveFileInfoMapPaths(
  repoRoot: string,
  filesInfo: GitFileInfoMap,
): GitFileInfoMap {
  const resolvedMap: GitFileInfoMap = new Map();
  // Git reports paths relative to the repo root: anchor each key there.
  for (const [relativePath, fileInfo] of filesInfo) {
    resolvedMap.set(resolve(repoRoot, relativePath), fileInfo);
  }
  return resolvedMap;
}
// Merges per-repo maps into one; on duplicate keys, later maps win.
function mergeFileMaps(fileMaps: GitFileInfoMap[]): GitFileInfoMap {
  const merged: GitFileInfoMap = new Map();
  for (const fileMap of fileMaps) {
    for (const [file, info] of fileMap) {
      merged.set(file, info);
    }
  }
  return merged;
}
/**
 * Eagerly reads the Git history of the repo containing `cwd` (plus all of
 * its submodules, read in parallel) and merges the per-repo file info maps
 * into one map keyed by absolute file path.
 */
async function loadAllGitFilesInfoMap(cwd: string): Promise<GitFileInfoMap> {
  const roots = await PerfLogger.async('Reading Git root dirs', () =>
    getGitAllRepoRoots(cwd),
  );
  // One `git log` per repo root, keys resolved to absolute paths
  async function readRepo(root: string): Promise<GitFileInfoMap> {
    const repoMap = await PerfLogger.async(
      `Reading Git history for repo ${logger.path(basename(root))}`,
      () => getGitRepositoryFilesInfo(root),
    );
    return resolveFileInfoMapPaths(root, repoMap);
  }
  const allMaps = await Promise.all(roots.map(readRepo));
  return mergeFileMaps(allMaps);
}
// Factory for the eager Git VCS: initialize() kicks off a single background
// read of the full Git history; per-file queries then await that shared
// promise and answer from the in-memory map.
function createGitVcsConfig(): VcsConfig {
  // Lazily-created shared promise; also serves as the "initialized" flag.
  let filesMapPromise: Promise<GitFileInfoMap> | null = null;
  // Returns null when not initialized (await null => null) or when the
  // file path is not present in the loaded map.
  async function getGitFileInfo(filePath: string): Promise<GitFileInfo | null> {
    const filesMap = await filesMapPromise;
    return filesMap?.get(filePath) ?? null;
  }
  return {
    initialize: ({siteDir}) => {
      if (filesMapPromise) {
        // We only initialize this VCS once!
        // For i18n sites, this permits reading ahead of time for all locales
        // so that it only slows down the first locale
        // I assume this logic is fine, but we'll see if it causes trouble
        // Note: we could also only call "initialize()" once from the outside,
        // But maybe it could be useful for custom VCS implementations to be
        // able to initialize once per locale?
        PerfLogger.log(
          'Git Eager VCS strategy already initialized, skipping re-initialization',
        );
        return;
      }
      filesMapPromise = PerfLogger.async('Git Eager VCS init', () =>
        loadAllGitFilesInfoMap(siteDir),
      );
      // NOTE(review): this catch only logs — getGitFileInfo() awaits the
      // same promise, so a load failure will still reject later lookups;
      // confirm that propagation is intended.
      filesMapPromise.catch((error) => {
        console.error(
          'Failed to initialize the Docusaurus Git Eager VCS strategy',
          error,
        );
      });
    },
    getFileCreationInfo: async (filePath: string) => {
      const fileInfo = await getGitFileInfo(filePath);
      return fileInfo?.creation ?? null;
    },
    getFileLastUpdateInfo: async (filePath: string) => {
      const fileInfo = await getGitFileInfo(filePath);
      return fileInfo?.lastUpdate ?? null;
    },
  };
}
// NOTE(review): "VscGitEager" looks like a typo for "VcsGitEager", but other
// modules import this exact name — renaming must be coordinated with them.
export const VscGitEager: VcsConfig = createGitVcsConfig();

View File

@ -0,0 +1,45 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import type {VcsConfig, VcsChangeInfo} from '@docusaurus/types';
// Fixed values returned by the hardcoded VCS, exported so tests can assert
// against them (see TEST_VCS in the vcs index module).
export const VCS_HARDCODED_CREATION_INFO: VcsChangeInfo = {
  timestamp: 1490997600000, // 1st Apr 2017
  author: 'Creator',
};
export const VCS_HARDCODED_LAST_UPDATE_INFO: VcsChangeInfo = {
  timestamp: 1539502055000, // 14th Oct 2018
  author: 'Author',
};
// Randomized so no real file ever matches it by accident.
export const VCS_HARDCODED_UNTRACKED_FILE_PATH = `file/path/${Math.random()}.mdx`;
/**
 * This VCS implementation always returns hardcoded values for testing purposes.
 * It is also useful in dev environments where VCS info is not important.
 * Reading information from the VCS can be slow and is not always necessary.
 */
export const VcsHardcoded: VcsConfig = {
  initialize: () => {
    // Noop
  },
  // The sentinel "untracked" path yields null, like a real untracked file
  getFileCreationInfo: async (filePath: string) =>
    filePath === VCS_HARDCODED_UNTRACKED_FILE_PATH
      ? null
      : VCS_HARDCODED_CREATION_INFO,
  getFileLastUpdateInfo: async (filePath: string) =>
    filePath === VCS_HARDCODED_UNTRACKED_FILE_PATH
      ? null
      : VCS_HARDCODED_LAST_UPDATE_INFO,
};

View File

@ -51,7 +51,7 @@
"escape-html": "^1.0.3",
"eta": "^2.2.0",
"eval": "^0.1.8",
"execa": "5.1.1",
"execa": "^5.1.1",
"fs-extra": "^11.1.1",
"html-tags": "^3.3.1",
"html-webpack-plugin": "^5.6.0",

View File

@ -9,7 +9,7 @@ import fs from 'fs-extra';
import logger, {PerfLogger} from '@docusaurus/logger';
import {mapAsyncSequential} from '@docusaurus/utils';
import {type LoadContextParams} from '../../server/site';
import {loadI18nLocaleList} from '../../server/i18n';
import {getLocaleList} from '../../server/i18n';
import {buildLocale, type BuildLocaleParams} from './buildLocale';
import {loadSiteConfig} from '../../server/config';
@ -88,7 +88,7 @@ async function getLocalesToBuild({
const locales =
cliOptions.locale ??
loadI18nLocaleList({
getLocaleList({
i18nConfig: siteConfig.i18n,
currentLocale: siteConfig.i18n.defaultLocale, // Awkward but ok
});

View File

@ -36,6 +36,7 @@ export type BuildLocaleParams = {
};
const SkipBundling = process.env.DOCUSAURUS_SKIP_BUNDLING === 'true';
const ReturnAfterLoading = process.env.DOCUSAURUS_RETURN_AFTER_LOADING === 'true';
const ExitAfterLoading = process.env.DOCUSAURUS_EXIT_AFTER_LOADING === 'true';
const ExitAfterBundling = process.env.DOCUSAURUS_EXIT_AFTER_BUNDLING === 'true';
@ -61,6 +62,9 @@ export async function buildLocale({
}),
);
if (ReturnAfterLoading) {
return;
}
if (ExitAfterLoading) {
return process.exit(0);
}

View File

@ -9,6 +9,7 @@ exports[`loadSiteConfig website with .cjs siteConfig 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -23,6 +24,11 @@ exports[`loadSiteConfig website with .cjs siteConfig 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -88,6 +94,7 @@ exports[`loadSiteConfig website with ts + js config 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -102,6 +109,11 @@ exports[`loadSiteConfig website with ts + js config 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -167,6 +179,7 @@ exports[`loadSiteConfig website with valid JS CJS config 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -181,6 +194,11 @@ exports[`loadSiteConfig website with valid JS CJS config 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -246,6 +264,7 @@ exports[`loadSiteConfig website with valid JS ESM config 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -260,6 +279,11 @@ exports[`loadSiteConfig website with valid JS ESM config 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -325,6 +349,7 @@ exports[`loadSiteConfig website with valid TypeScript CJS config 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -339,6 +364,11 @@ exports[`loadSiteConfig website with valid TypeScript CJS config 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -404,6 +434,7 @@ exports[`loadSiteConfig website with valid TypeScript ESM config 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -418,6 +449,11 @@ exports[`loadSiteConfig website with valid TypeScript ESM config 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -483,6 +519,7 @@ exports[`loadSiteConfig website with valid async config 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -497,6 +534,11 @@ exports[`loadSiteConfig website with valid async config 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -564,6 +606,7 @@ exports[`loadSiteConfig website with valid async config creator function 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -578,6 +621,11 @@ exports[`loadSiteConfig website with valid async config creator function 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -645,6 +693,7 @@ exports[`loadSiteConfig website with valid config creator function 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -659,6 +708,11 @@ exports[`loadSiteConfig website with valid config creator function 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -729,6 +783,7 @@ exports[`loadSiteConfig website with valid siteConfig 1`] = `
"favicon": "img/docusaurus.ico",
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -743,6 +798,11 @@ exports[`loadSiteConfig website with valid siteConfig 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,

View File

@ -89,6 +89,7 @@ exports[`loadSite custom-i18n-site loads site 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -103,6 +104,11 @@ exports[`loadSite custom-i18n-site loads site 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -256,6 +262,7 @@ exports[`loadSite simple-site-with-baseUrl loads site - custom config 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -270,6 +277,11 @@ exports[`loadSite simple-site-with-baseUrl loads site - custom config 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -415,6 +427,7 @@ exports[`loadSite simple-site-with-baseUrl loads site - custom outDir 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -429,6 +442,11 @@ exports[`loadSite simple-site-with-baseUrl loads site - custom outDir 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -574,6 +592,7 @@ exports[`loadSite simple-site-with-baseUrl loads site 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -588,6 +607,11 @@ exports[`loadSite simple-site-with-baseUrl loads site 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -777,6 +801,7 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - locale fr + custom
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -791,6 +816,11 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - locale fr + custom
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -1002,6 +1032,7 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - custom outDir 1`] =
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -1016,6 +1047,11 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - custom outDir 1`] =
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -1227,6 +1263,7 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - locale de 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -1241,6 +1278,11 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - locale de 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -1452,6 +1494,7 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - locale en 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -1466,6 +1509,11 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - locale en 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -1677,6 +1725,7 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - locale es 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -1691,6 +1740,11 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - locale es 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -1902,6 +1956,7 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - locale fr 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -1916,6 +1971,11 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - locale fr 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -2127,6 +2187,7 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - locale it 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -2141,6 +2202,11 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site - locale it 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,
@ -2352,6 +2418,7 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site 1`] = `
"customFields": {},
"future": {
"experimental_faster": {
"gitEagerVcs": false,
"lightningCssMinimizer": false,
"mdxCrossCompilerCache": false,
"rspackBundler": false,
@ -2366,6 +2433,11 @@ exports[`loadSite simple-site-with-baseUrl-i18n loads site 1`] = `
"namespace": false,
"type": "localStorage",
},
"experimental_vcs": {
"getFileCreationInfo": [Function],
"getFileLastUpdateInfo": [Function],
"initialize": [Function],
},
"v4": {
"removeLegacyPostBuildHeadAttribute": false,
"useCssCascadeLayers": false,

View File

@ -6,6 +6,7 @@
*/
import {jest} from '@jest/globals';
import {getVcsPreset} from '@docusaurus/utils';
import {
ConfigSchema,
DEFAULT_CONFIG,
@ -29,6 +30,8 @@ import type {
PluginConfig,
I18nConfig,
I18nLocaleConfig,
VcsConfig,
VcsPreset,
} from '@docusaurus/types';
import type {DeepPartial} from 'utility-types';
@ -68,11 +71,17 @@ describe('normalizeConfig', () => {
rspackBundler: true,
rspackPersistentCache: true,
ssgWorkerThreads: true,
gitEagerVcs: true,
},
experimental_storage: {
type: 'sessionStorage',
namespace: true,
},
experimental_vcs: {
initialize: (_params) => {},
getFileCreationInfo: (_filePath) => null,
getFileLastUpdateInfo: (_filePath) => null,
},
experimental_router: 'hash',
},
tagline: 'my awesome site',
@ -1076,6 +1085,12 @@ describe('future', () => {
rspackBundler: true,
rspackPersistentCache: true,
ssgWorkerThreads: true,
gitEagerVcs: true,
},
experimental_vcs: {
initialize: (_params) => {},
getFileCreationInfo: (_filePath) => null,
getFileLastUpdateInfo: (_filePath) => null,
},
experimental_storage: {
type: 'sessionStorage',
@ -1394,6 +1409,196 @@ describe('future', () => {
});
});
describe('vcs', () => {
  // Asserts that the normalized config's `experimental_vcs` contains `vcs`
  function vcsContaining(vcs: Partial<VcsConfig>) {
    return futureContaining({
      experimental_vcs: expect.objectContaining(vcs),
    });
  }

  describe('base', () => {
    it('accepts vcs - undefined', () => {
      expect(
        normalizeConfig({
          future: {
            experimental_vcs: undefined,
          },
        }),
      ).toEqual(
        futureContaining({
          ...DEFAULT_FUTURE_CONFIG,
          experimental_vcs: getVcsPreset('default-v1'),
        }),
      );
    });

    it('accepts vcs - true', () => {
      expect(
        normalizeConfig({
          future: {
            experimental_vcs: true,
          },
        }),
      ).toEqual(
        futureContaining({
          ...DEFAULT_FUTURE_CONFIG,
          experimental_vcs: getVcsPreset('default-v1'),
        }),
      );
    });

    it('accepts vcs - false', () => {
      expect(
        normalizeConfig({
          future: {
            experimental_vcs: false,
          },
        }),
      ).toEqual(
        futureContaining({
          ...DEFAULT_FUTURE_CONFIG,
          experimental_vcs: getVcsPreset('disabled'),
        }),
      );
    });
  });

  describe('presets', () => {
    it('accepts git-ad-hoc', () => {
      const presetName: VcsPreset = 'git-ad-hoc';
      expect(
        normalizeConfig({
          future: {
            experimental_vcs: presetName,
          },
        }),
      ).toEqual(vcsContaining(getVcsPreset(presetName)));
    });

    it('accepts git-eager', () => {
      const presetName: VcsPreset = 'git-eager';
      expect(
        normalizeConfig({
          future: {
            experimental_vcs: presetName,
          },
        }),
      ).toEqual(vcsContaining(getVcsPreset(presetName)));
    });

    it('accepts hardcoded', () => {
      const presetName: VcsPreset = 'hardcoded';
      expect(
        normalizeConfig({
          future: {
            experimental_vcs: presetName,
          },
        }),
      ).toEqual(vcsContaining(getVcsPreset(presetName)));
    });

    it('rejects unknown preset name', () => {
      // @ts-expect-error: invalid on purpose
      const presetName: VcsPreset = 'unknown-preset-name';
      expect(() =>
        normalizeConfig({
          future: {
            experimental_vcs: presetName,
          },
        }),
      ).toThrowErrorMatchingInlineSnapshot(`
        ""future.experimental_vcs" failed custom validation because VCS config preset name 'unknown-preset-name' is not valid.
        "
      `);
    });
  });

  describe('object config', () => {
    it('accepts vcs - full', () => {
      const vcs: VcsConfig = {
        initialize: (_params) => {},
        getFileCreationInfo: (_filePath) => null,
        getFileLastUpdateInfo: (_filePath) => null,
      };
      expect(
        normalizeConfig({
          future: {
            experimental_vcs: vcs,
          },
        }),
      ).toEqual(vcsContaining(vcs));
    });

    it('rejects vcs - empty', () => {
      expect(() =>
        normalizeConfig({
          future: {experimental_vcs: {}},
        }),
      ).toThrowErrorMatchingInlineSnapshot(`
        ""future.experimental_vcs" failed custom validation because "initialize" is required
        "
      `);
    });

    it('rejects vcs - bad initialize() arity', () => {
      const vcs: VcsConfig = {
        // @ts-expect-error: invalid arity
        initialize: (_params, _extraParam) => {},
        getFileCreationInfo: (_filePath) => null,
        getFileLastUpdateInfo: (_filePath) => null,
      };
      expect(() =>
        normalizeConfig({
          future: {
            experimental_vcs: vcs,
          },
        }),
      ).toThrowErrorMatchingInlineSnapshot(`
        ""future.experimental_vcs" failed custom validation because "initialize" must have an arity lesser or equal to 1
        "
      `);
    });

    it('rejects vcs - bad getFileCreationInfo() arity', () => {
      const vcs: VcsConfig = {
        initialize: (_params) => {},
        // @ts-expect-error: invalid arity
        getFileCreationInfo: (_filePath, _extraParam) => null,
        getFileLastUpdateInfo: (_filePath) => null,
      };
      expect(() =>
        normalizeConfig({
          future: {
            experimental_vcs: vcs,
          },
        }),
      ).toThrowErrorMatchingInlineSnapshot(`
        ""future.experimental_vcs" failed custom validation because "getFileCreationInfo" must have an arity of 1
        "
      `);
    });

    it('rejects vcs - bad getFileLastUpdateInfo() arity', () => {
      const vcs: VcsConfig = {
        initialize: (_params) => {},
        getFileCreationInfo: (_filePath) => null,
        // @ts-expect-error: invalid arity
        getFileLastUpdateInfo: (_filePath, _extraParam) => null,
      };
      expect(() =>
        normalizeConfig({
          future: {
            experimental_vcs: vcs,
          },
        }),
      ).toThrowErrorMatchingInlineSnapshot(`
        ""future.experimental_vcs" failed custom validation because "getFileLastUpdateInfo" must have an arity of 1
        "
      `);
    });
  });
});
describe('faster', () => {
function fasterContaining(faster: Partial<FasterConfig>) {
return futureContaining({
@ -1429,6 +1634,7 @@ describe('future', () => {
rspackBundler: true,
rspackPersistentCache: true,
ssgWorkerThreads: true,
gitEagerVcs: true,
};
expect(
normalizeConfig({
@ -2141,6 +2347,87 @@ describe('future', () => {
`);
});
});
describe('gitEagerVcs', () => {
  it('accepts - undefined', () => {
    const faster: Partial<FasterConfig> = {
      gitEagerVcs: undefined,
    };
    expect(
      normalizeConfig({
        future: {
          experimental_faster: faster,
        },
      }),
    ).toEqual(fasterContaining({gitEagerVcs: false}));
  });

  it('accepts - true', () => {
    const faster: Partial<FasterConfig> = {
      gitEagerVcs: true,
    };
    expect(
      normalizeConfig({
        future: {
          experimental_faster: faster,
        },
      }),
    ).toEqual(
      futureContaining({
        experimental_faster: expect.objectContaining(faster),
        // Turning the flag on upgrades the default VCS preset
        experimental_vcs: getVcsPreset('default-v2'),
      }),
    );
  });

  it('accepts - false', () => {
    const faster: Partial<FasterConfig> = {
      gitEagerVcs: false,
    };
    expect(
      normalizeConfig({
        future: {
          experimental_faster: faster,
        },
      }),
    ).toEqual(
      futureContaining({
        experimental_faster: expect.objectContaining(faster),
        experimental_vcs: getVcsPreset('default-v1'),
      }),
    );
  });

  it('rejects - null', () => {
    // @ts-expect-error: invalid
    const faster: Partial<FasterConfig> = {gitEagerVcs: null};
    expect(() =>
      normalizeConfig({
        future: {
          experimental_faster: faster,
        },
      }),
    ).toThrowErrorMatchingInlineSnapshot(`
      ""future.experimental_faster.gitEagerVcs" must be a boolean
      "
    `);
  });

  it('rejects - number', () => {
    // @ts-expect-error: invalid
    const faster: Partial<FasterConfig> = {gitEagerVcs: 42};
    expect(() =>
      normalizeConfig({
        future: {
          experimental_faster: faster,
        },
      }),
    ).toThrowErrorMatchingInlineSnapshot(`
      ""future.experimental_faster.gitEagerVcs" must be a boolean
      "
    `);
  });
});
});
describe('v4', () => {

View File

@ -6,27 +6,31 @@
*/
import {
DEFAULT_I18N_DIR_NAME,
DEFAULT_PARSE_FRONT_MATTER,
DEFAULT_STATIC_DIR_NAME,
DEFAULT_I18N_DIR_NAME,
getVcsPreset,
VcsPresetNames,
} from '@docusaurus/utils';
import {Joi, printWarning} from '@docusaurus/utils-validation';
import {
addTrailingSlash,
addLeadingSlash,
addTrailingSlash,
removeTrailingSlash,
} from '@docusaurus/utils-common';
import logger from '@docusaurus/logger';
import type {
DocusaurusConfig,
FasterConfig,
FutureConfig,
FutureV4Config,
StorageConfig,
DocusaurusConfig,
I18nConfig,
I18nLocaleConfig,
MarkdownConfig,
MarkdownHooks,
I18nLocaleConfig,
StorageConfig,
VcsConfig,
VcsPreset,
} from '@docusaurus/types';
const DEFAULT_I18N_LOCALE = 'en';
@ -77,6 +81,7 @@ export const DEFAULT_FASTER_CONFIG: FasterConfig = {
rspackBundler: false,
rspackPersistentCache: false,
ssgWorkerThreads: false,
gitEagerVcs: false,
};
// When using the "faster: true" shortcut
@ -89,6 +94,7 @@ export const DEFAULT_FASTER_CONFIG_TRUE: FasterConfig = {
rspackBundler: true,
rspackPersistentCache: true,
ssgWorkerThreads: true,
gitEagerVcs: true,
};
export const DEFAULT_FUTURE_V4_CONFIG: FutureV4Config = {
@ -106,6 +112,7 @@ export const DEFAULT_FUTURE_CONFIG: FutureConfig = {
v4: DEFAULT_FUTURE_V4_CONFIG,
experimental_faster: DEFAULT_FASTER_CONFIG,
experimental_storage: DEFAULT_STORAGE_CONFIG,
experimental_vcs: getVcsPreset('default-v1'),
experimental_router: 'browser',
};
@ -291,6 +298,7 @@ const FASTER_CONFIG_SCHEMA = Joi.alternatives()
ssgWorkerThreads: Joi.boolean().default(
DEFAULT_FASTER_CONFIG.ssgWorkerThreads,
),
gitEagerVcs: Joi.boolean().default(DEFAULT_FASTER_CONFIG.gitEagerVcs),
}),
Joi.boolean()
.required()
@ -331,10 +339,41 @@ const STORAGE_CONFIG_SCHEMA = Joi.object({
.optional()
.default(DEFAULT_STORAGE_CONFIG);
// Joi schema for a fully-specified VcsConfig object.
const VCS_CONFIG_OBJECT_SCHEMA = Joi.object<VcsConfig>({
  // All the fields are required on purpose
  // You either provide a full VCS config or nothing
  // initialize() accepts at most one param; the getters take exactly one
  // (the file path) — stricter arities catch accidental API misuse early.
  initialize: Joi.function().maxArity(1).required(),
  getFileCreationInfo: Joi.function().arity(1).required(),
  getFileLastUpdateInfo: Joi.function().arity(1).required(),
});
// Accepts three input forms for `future.experimental_vcs`:
// - a preset name (string)  -> resolved to its VcsConfig here
// - a boolean shortcut      -> kept as-is, normalized later (see below)
// - a full VcsConfig object -> validated against VCS_CONFIG_OBJECT_SCHEMA
const VCS_CONFIG_SCHEMA = Joi.custom((input) => {
  if (typeof input === 'string') {
    const presetName = input as VcsPreset;
    if (!VcsPresetNames.includes(presetName)) {
      throw new Error(`VCS config preset name '${input}' is not valid.`);
    }
    return getVcsPreset(presetName);
  }
  if (typeof input === 'boolean') {
    // We return the boolean on purpose
    // We'll normalize it to a real VcsConfig later
    // This is annoying, but we have to read the future flag to switch to the
    // new "default-v2" config (not easy to do it here)
    return input;
  }
  const {error, value} = VCS_CONFIG_OBJECT_SCHEMA.validate(input);
  if (error) {
    throw error;
  }
  return value;
}).default(true);
const FUTURE_CONFIG_SCHEMA = Joi.object<FutureConfig>({
v4: FUTURE_V4_SCHEMA,
experimental_faster: FASTER_CONFIG_SCHEMA,
experimental_storage: STORAGE_CONFIG_SCHEMA,
experimental_vcs: VCS_CONFIG_SCHEMA,
experimental_router: Joi.string()
.equal('browser', 'hash')
.default(DEFAULT_FUTURE_CONFIG.experimental_router),
@ -498,6 +537,17 @@ Please migrate and move this option to code=${'siteConfig.markdown.hooks.onBroke
config.onBrokenMarkdownLinks = undefined;
}
// We normalize the VCS config when using a boolean value
if (typeof config.future.experimental_vcs === 'boolean') {
const vcsConfig = config.future.experimental_vcs
? config.future.experimental_faster.gitEagerVcs
? getVcsPreset('default-v2')
: getVcsPreset('default-v1')
: getVcsPreset('disabled');
config.future.experimental_vcs = vcsConfig;
}
if (
config.future.experimental_faster.ssgWorkerThreads &&
!config.future.v4.removeLegacyPostBuildHeadAttribute

View File

@ -111,7 +111,7 @@ Make sure it is a valid BCP 47 locale name (e.g. en, fr, fr-FR, etc.) and/or pro
}
}
export function loadI18nLocaleList({
export function getLocaleList({
i18nConfig,
currentLocale,
}: {
@ -140,7 +140,7 @@ export async function loadI18n({
}): Promise<I18n> {
const {i18n: i18nConfig} = config;
const locales = loadI18nLocaleList({
const locales = getLocaleList({
i18nConfig,
currentLocale,
});

View File

@ -101,6 +101,15 @@ export async function loadContext(
}),
});
// Not sure where the best place to put this VCS initialization call is;
// the sooner, the better.
// Note: we don't await the result on purpose!
// VCS initialization can be slow for large repos, and we don't want to block
// startup. VCS integrations should be carefully designed to avoid blocking.
PerfLogger.async('VCS init', () => {
return initialSiteConfig.future.experimental_vcs.initialize({siteDir});
});
const currentBundler = await getCurrentBundler({
siteConfig: initialSiteConfig,
});

View File

@ -314,6 +314,7 @@ Sucipto
sunsetting
Supabase
supabase
superproject
svgs
swizzlable
Sébastien
@ -335,6 +336,7 @@ twoslash
typesafe
Typesense
typesense
Udeadbeefcafe
Unavatar
unlinkable
Unlisteds

View File

@ -266,10 +266,100 @@ export default {
- [`rspackPersistentCache`](https://github.com/facebook/docusaurus/pull/10931): Use [Rspack Persistent Cache](https://rspack.dev/config/cache) to re-build your app faster on subsequent builds. Requires `rspackBundler: true`. Requires persisting `./node_modules/.cache` across rebuilds.
- [`mdxCrossCompilerCache`](https://github.com/facebook/docusaurus/pull/10479): Compile MDX files only once for both browser/Node.js environments instead of twice.
- [`ssgWorkerThreads`](https://github.com/facebook/docusaurus/pull/10826): Using a Node.js worker thread pool to execute the static site generation phase faster. Requires `future.v4.removeLegacyPostBuildHeadAttribute` to be turned on.
- [`gitEagerVcs`](https://github.com/facebook/docusaurus/pull/11512): Upgrades the default [VCS strategy](#vcs) to `default-v2`, that reads your whole Git repository at once instead of per-file, making Git operations faster on large repositories.
- `experimental_storage`: Site-wide browser storage options that theme authors should strive to respect.
- `type`: The browser storage theme authors should use. Possible values are `localStorage` and `sessionStorage`. Defaults to `localStorage`.
- `namespace`: Whether to namespace the browser storage keys to avoid storage key conflicts when Docusaurus sites are hosted under the same domain, or on localhost. Possible values are `string | boolean`. The namespace is appended at the end of the storage keys `key-namespace`. Use `true` to automatically generate a random namespace from your site `url + baseUrl`. Defaults to `false` (no namespace, historical behavior).
- `experimental_router`: The router type to use. Possible values are `browser` and `hash`. Defaults to `browser`. The `hash` router is only useful for rare cases where you want to opt-out of static site generation, have a fully client-side app with a single `index.html` entrypoint file. This can be useful to distribute a Docusaurus site as a `.zip` archive that you can [browse locally without running a web server](https://github.com/facebook/docusaurus/issues/3825).
- [`experimental_vcs`](#vcs): The Version Control System (VCS) implementation to use to read file info (creation/last update date/author). Read the [dedicated section](#vcs) below for details.
#### `experimental_vcs` {#vcs}
This exposes an API that lets you provide your own Version Control System (VCS) implementation to read file info (creation/last update date/author).
```ts
export default {
future: {
experimental_vcs: {
initialize: ({siteDir}) => {
// Initialize your VCS client here.
// If you want to read your VCS eagerly/incrementally on startup,
// this is the place to do it.
// This function is synchronous on purpose and not awaited
// It should not delay Docusaurus startup, but be run in parallel.
},
getFileCreationInfo: async (filePath: string) => {
// Provide your own implementation to read file creation info.
return getFileCreationInfo(filePath);
},
getFileLastUpdateInfo: async (filePath: string) => {
// Provide your own implementation to read file last update info.
return getFileLastUpdateInfo(filePath);
},
},
},
};
```
##### VCS Presets {#vcs-presets}
It is possible to pass a boolean VCS value:
- `true`: enables the default VCS preset (`default-v1` or `default-v2`, depending on the Docusaurus Faster `gitEagerVcs` flag value)
- `false`: disables the VCS, always returns `null` for all files
```ts
export default {
future: {
experimental_vcs: true, // Enables the default VCS preset
},
};
```
It is also possible to choose a VCS preset we provide out of the box by its name.
```ts
export default {
future: {
experimental_vcs: 'presetName',
},
};
```
The available preset names are:
- `git-ad-hoc`: the historical `git log <filename>` based strategy.
- `git-eager`: the new Git strategy that reads your whole repository upfront.
- `hardcoded`: returns a hardcoded value, useful in dev/tests to speed up the developer experience.
- `disabled`: returns `null` for all files, considering them untracked.
- `default-v1`: the historical default (`git-ad-hoc` in prod, `hardcoded` in dev)
- `default-v2`: the upcoming default (`git-eager` in prod, `hardcoded` in dev)
Unless you have specific needs, we recommend using the default presets (`default-v1` or `default-v2`), which skip reading file info in development mode for better performance.
##### VCS Types {#vcs-types}
```ts
type VcsChangeInfo = {timestamp: number; author: string};
type VcsInitializeParams = {
  siteDir: string;
};

type VcsConfig = {
  initialize: (params: VcsInitializeParams) => void;
  getFileCreationInfo: (filePath: string) => Promise<VcsChangeInfo | null>;
  getFileLastUpdateInfo: (filePath: string) => Promise<VcsChangeInfo | null>;
type VcsPreset =
| 'git-ad-hoc'
| 'git-eager'
| 'hardcoded'
| 'disabled'
| 'default-v1'
| 'default-v2';
```
### `noIndex` {#noIndex}
@ -540,7 +630,7 @@ type MDX1CompatOptions =
headingIds: boolean;
};
export type ParseFrontMatter = (params: {
type ParseFrontMatter = (params: {
filePath: string;
fileContent: string;
defaultParseFrontMatter: ParseFrontMatter;

View File

@ -25,7 +25,7 @@ import ConfigLocalized from './docusaurus.config.localized.json';
import PrismLight from './src/utils/prismLight';
import PrismDark from './src/utils/prismDark';
import type {Config, DocusaurusConfig} from '@docusaurus/types';
import type {Config, DocusaurusConfig, VcsPreset} from '@docusaurus/types';
import type * as Preset from '@docusaurus/preset-classic';
import type {Options as DocsOptions} from '@docusaurus/plugin-content-docs';
@ -108,6 +108,8 @@ if (isSlower) {
const router = process.env
.DOCUSAURUS_ROUTER as DocusaurusConfig['future']['experimental_router'];
const vcs = process.env.DOCUSAURUS_SITE_VCS as VcsPreset;
const isDev = process.env.NODE_ENV === 'development';
// See https://docs.netlify.com/configure-builds/environment-variables/
@ -160,7 +162,8 @@ function getLocalizedConfigValue(key: keyof typeof ConfigLocalized) {
// By default, we don't want to run "git log" commands on i18n sites
// This makes localized sites build much slower on Netlify
// See also https://github.com/facebook/docusaurus/issues/11208
const showLastUpdate = process.env.DOCUSAURUS_CURRENT_LOCALE === defaultLocale;
// const showLastUpdate = process.env.DOCUSAURUS_CURRENT_LOCALE === defaultLocale;
const showLastUpdate = true;
export default async function createConfigAsync() {
return {
@ -185,10 +188,12 @@ export default async function createConfigAsync() {
rspackBundler: true,
rspackPersistentCache: true,
ssgWorkerThreads: true,
gitEagerVcs: true,
},
experimental_storage: {
namespace: true,
},
experimental_vcs: vcs,
experimental_router: router,
},
// Dogfood both settings:

View File

@ -56,6 +56,7 @@
"@mermaid-js/layout-elk": "^0.1.9",
"clsx": "^2.0.0",
"color": "^4.2.3",
"execa": "^5.1.1",
"fs-extra": "^11.1.1",
"netlify-plugin-cache": "^1.0.3",
"raw-loader": "^4.0.2",

View File

@ -6088,9 +6088,9 @@ caniuse-api@^3.0.0:
lodash.uniq "^4.5.0"
caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001702, caniuse-lite@^1.0.30001718:
version "1.0.30001721"
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001721.tgz#36b90cd96901f8c98dd6698bf5c8af7d4c6872d7"
integrity sha512-cOuvmUVtKrtEaoKiO0rSc29jcjwMwX5tOHDy4MgVFEWiUXj4uBMJkwI8MDySkgXidpMiHUcviogAvFi4pA2hDQ==
version "1.0.30001754"
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001754.tgz#7758299d9a72cce4e6b038788a15b12b44002759"
integrity sha512-x6OeBXueoAceOmotzx3PO4Zpt4rzpeIFsSr6AAePTZxSkXiYDUmpypEl7e2+8NCd9bD7bXjqyef8CJYPC1jfxg==
ccount@^2.0.0:
version "2.0.1"
@ -8790,7 +8790,7 @@ execa@5.0.0:
signal-exit "^3.0.3"
strip-final-newline "^2.0.0"
execa@5.1.1, execa@^5.0.0:
execa@^5.0.0, execa@^5.1.1:
version "5.1.1"
resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd"
integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==