mirror of https://github.com/facebook/docusaurus.git (synced 2025-12-26 01:33:02 +00:00)

test: enable a few jest eslint rules (#6900)

* test: enable a few jest eslint rules
* more

This commit is contained in:
parent 1efc6c6091
commit aa5a2d4c04

.eslintrc.js (21 changed lines)
@@ -205,9 +205,26 @@ module.exports = {
'import/order': OFF,
'import/prefer-default-export': OFF,

'jest/prefer-expect-resolves': WARNING,
'jest/consistent-test-it': WARNING,
'jest/expect-expect': OFF,
'jest/valid-title': OFF,
'jest/no-large-snapshots': [
WARNING,
{maxSize: Infinity, inlineMaxSize: 10},
],
'jest/prefer-expect-resolves': WARNING,
'jest/prefer-lowercase-title': [WARNING, {ignore: ['describe']}],
'jest/require-top-level-describe': ERROR,
'jest/valid-title': [
ERROR,
{
mustNotMatch: {
it: [
'^should|\\.$',
'Titles should not begin with "should" or end with a full-stop',
],
},
},
],

'jsx-a11y/click-events-have-key-events': WARNING,
'jsx-a11y/no-noninteractive-element-interactions': WARNING,
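For reference, a minimal sketch (not part of this commit) of the test-naming style the new jest rules enforce; the sum module and file name are hypothetical:

// sum.test.ts — hypothetical example
import {sum} from './sum';

// `jest/require-top-level-describe`: tests must sit inside a describe block.
describe('sum', () => {
  // `jest/consistent-test-it` prefers `it` over `test`, and the `mustNotMatch`
  // pattern above rejects titles starting with "should" or ending with ".".
  it('adds two numbers', () => {
    expect(sum(1, 2)).toBe(3);
  });
});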
@@ -26,12 +26,12 @@ async function getPackagesJsonFiles(): Promise<PackageJsonFile[]> {
}

describe('packages', () => {
test('should be found', async () => {
it('are found', async () => {
const packageJsonFiles = await getPackagesJsonFiles();
expect(packageJsonFiles.length).toBeGreaterThan(0);
});

test('should contain repository and directory for every package', async () => {
it('contain repository and directory', async () => {
const packageJsonFiles = await getPackagesJsonFiles();

packageJsonFiles

@@ -51,7 +51,7 @@ describe('packages', () => {
This will make you publish an incomplete list of Docusaurus packages
when trying to release with lerna-publish
*/
test('should have publishConfig.access: "public" when name starts with @', async () => {
it('have publishConfig.access: "public" when name starts with @', async () => {
const packageJsonFiles = await getPackagesJsonFiles();

packageJsonFiles
@@ -23,11 +23,11 @@ const processFixture = (name) => {
};

describe('remove-overridden-custom-properties', () => {
test('overridden custom properties should be removed', () => {
it('overridden custom properties should be removed', () => {
expect(processFixture('normal')).toMatchSnapshot();
});

test('overridden custom properties with `!important` rule should not be removed', () => {
it('overridden custom properties with `!important` rule should not be removed', () => {
expect(processFixture('important_rule')).toMatchSnapshot();
});
});
@@ -0,0 +1,69 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`error prints objects 1`] = `
Array [
Array [
"[31m[1m[ERROR][22m {\\"a\\":1}[39m",
],
Array [
"[31m[1m[ERROR][22m undefined[39m",
],
Array [
"[31m[1m[ERROR][22m 1,2,3[39m",
],
Array [
"[31m[1m[ERROR][22m Sat Nov 13 2021 00:00:00 GMT+0000 (Coordinated Universal Time)[39m",
],
]
`;

exports[`info prints objects 1`] = `
Array [
Array [
"[36m[1m[INFO][22m[39m {\\"a\\":1}",
],
Array [
"[36m[1m[INFO][22m[39m undefined",
],
Array [
"[36m[1m[INFO][22m[39m 1,2,3",
],
Array [
"[36m[1m[INFO][22m[39m Sat Nov 13 2021 00:00:00 GMT+0000 (Coordinated Universal Time)",
],
]
`;

exports[`success prints objects 1`] = `
Array [
Array [
"[32m[1m[SUCCESS][22m[39m {\\"a\\":1}",
],
Array [
"[32m[1m[SUCCESS][22m[39m undefined",
],
Array [
"[32m[1m[SUCCESS][22m[39m 1,2,3",
],
Array [
"[32m[1m[SUCCESS][22m[39m Sat Nov 13 2021 00:00:00 GMT+0000 (Coordinated Universal Time)",
],
]
`;

exports[`warn prints objects 1`] = `
Array [
Array [
"[33m[1m[WARNING][22m {\\"a\\":1}[39m",
],
Array [
"[33m[1m[WARNING][22m undefined[39m",
],
Array [
"[33m[1m[WARNING][22m 1,2,3[39m",
],
Array [
"[33m[1m[WARNING][22m Sat Nov 13 2021 00:00:00 GMT+0000 (Coordinated Universal Time)[39m",
],
]
`;
@@ -9,22 +9,22 @@ import {jest} from '@jest/globals';
import logger from '../index';

describe('formatters', () => {
test('path', () => {
it('path', () => {
expect(logger.path('hey')).toMatchInlineSnapshot(`"[36m[4mhey[24m[39m"`);
});
test('id', () => {
it('id', () => {
expect(logger.name('hey')).toMatchInlineSnapshot(`"[34m[1mhey[22m[39m"`);
});
test('code', () => {
it('code', () => {
expect(logger.code('hey')).toMatchInlineSnapshot(`"[36m\`hey\`[39m"`);
});
test('subdue', () => {
it('subdue', () => {
expect(logger.subdue('hey')).toMatchInlineSnapshot(`"[90mhey[39m"`);
});
});

describe('interpolate', () => {
test('should format text with variables & arrays', () => {
it('formats text with variables & arrays', () => {
const name = 'Josh';
const items = [1, 'hi', 'Hmmm'];
expect(logger.interpolate`Hello ${name}! Here are your goodies:${items}`)

@@ -35,14 +35,14 @@ describe('interpolate', () => {
- Hmmm"
`);
});
test('should recognize valid flags', () => {
it('recognizes valid flags', () => {
expect(
logger.interpolate`The package at path=${'packages/docusaurus'} has number=${10} files. name=${'Babel'} is exported here subdue=${'(as a preset)'} that you can with code=${"require.resolve('@docusaurus/core/lib/babel/preset')"}`,
).toMatchInlineSnapshot(
`"The package at [36m[4mpackages/docusaurus[24m[39m has [33m10[39m files. [34m[1mBabel[22m[39m is exported here [90m(as a preset)[39m that you can with [36m\`require.resolve('@docusaurus/core/lib/babel/preset')\`[39m"`,
);
});
test('should interpolate arrays with flags', () => {
it('interpolates arrays with flags', () => {
expect(
logger.interpolate`The following commands are available:code=${[
'docusaurus start',

@@ -56,14 +56,14 @@ describe('interpolate', () => {
- [36m\`docusaurus deploy\`[39m"
`);
});
test('should print detached flags as-is', () => {
it('prints detached flags as-is', () => {
expect(
logger.interpolate`You can use placeholders like code= ${'and it will'} be replaced with the succeeding arguments`,
).toMatchInlineSnapshot(
`"You can use placeholders like code= and it will be replaced with the succeeding arguments"`,
);
});
test('should throw with bad flags', () => {
it('throws with bad flags', () => {
expect(
() =>
logger.interpolate`I mistyped this: cde=${'this code'} and I will be damned`,
@@ -75,104 +75,44 @@ describe('interpolate', () => {

describe('info', () => {
const consoleMock = jest.spyOn(console, 'info').mockImplementation(() => {});
test('should print objects', () => {
it('prints objects', () => {
logger.info({a: 1});
logger.info(undefined);
logger.info([1, 2, 3]);
logger.info(new Date(2021, 10, 13));
expect(consoleMock.mock.calls).toMatchInlineSnapshot(`
Array [
Array [
"[36m[1m[INFO][22m[39m {\\"a\\":1}",
],
Array [
"[36m[1m[INFO][22m[39m undefined",
],
Array [
"[36m[1m[INFO][22m[39m 1,2,3",
],
Array [
"[36m[1m[INFO][22m[39m Sat Nov 13 2021 00:00:00 GMT+0000 (Coordinated Universal Time)",
],
]
`);
expect(consoleMock.mock.calls).toMatchSnapshot();
});
});

describe('warn', () => {
const consoleMock = jest.spyOn(console, 'warn').mockImplementation(() => {});
test('should print objects', () => {
it('prints objects', () => {
logger.warn({a: 1});
logger.warn(undefined);
logger.warn([1, 2, 3]);
logger.warn(new Date(2021, 10, 13));
expect(consoleMock.mock.calls).toMatchInlineSnapshot(`
Array [
Array [
"[33m[1m[WARNING][22m {\\"a\\":1}[39m",
],
Array [
"[33m[1m[WARNING][22m undefined[39m",
],
Array [
"[33m[1m[WARNING][22m 1,2,3[39m",
],
Array [
"[33m[1m[WARNING][22m Sat Nov 13 2021 00:00:00 GMT+0000 (Coordinated Universal Time)[39m",
],
]
`);
expect(consoleMock.mock.calls).toMatchSnapshot();
});
});

describe('error', () => {
const consoleMock = jest.spyOn(console, 'error').mockImplementation(() => {});
test('should print objects', () => {
it('prints objects', () => {
logger.error({a: 1});
logger.error(undefined);
logger.error([1, 2, 3]);
logger.error(new Date(2021, 10, 13));
expect(consoleMock.mock.calls).toMatchInlineSnapshot(`
Array [
Array [
"[31m[1m[ERROR][22m {\\"a\\":1}[39m",
],
Array [
"[31m[1m[ERROR][22m undefined[39m",
],
Array [
"[31m[1m[ERROR][22m 1,2,3[39m",
],
Array [
"[31m[1m[ERROR][22m Sat Nov 13 2021 00:00:00 GMT+0000 (Coordinated Universal Time)[39m",
],
]
`);
expect(consoleMock.mock.calls).toMatchSnapshot();
});
});

describe('success', () => {
const consoleMock = jest.spyOn(console, 'log').mockImplementation(() => {});
test('should print objects', () => {
it('prints objects', () => {
logger.success({a: 1});
logger.success(undefined);
logger.success([1, 2, 3]);
logger.success(new Date(2021, 10, 13));
expect(consoleMock.mock.calls).toMatchInlineSnapshot(`
Array [
Array [
"[32m[1m[SUCCESS][22m[39m {\\"a\\":1}",
],
Array [
"[32m[1m[SUCCESS][22m[39m undefined",
],
Array [
"[32m[1m[SUCCESS][22m[39m 1,2,3",
],
Array [
"[32m[1m[SUCCESS][22m[39m Sat Nov 13 2021 00:00:00 GMT+0000 (Coordinated Universal Time)",
],
]
`);
expect(consoleMock.mock.calls).toMatchSnapshot();
});
});
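The hunk above replaces multi-line toMatchInlineSnapshot calls with toMatchSnapshot(), in line with the jest/no-large-snapshots setting ({maxSize: Infinity, inlineMaxSize: 10}) added to .eslintrc.js. A minimal sketch of that distinction, using a hypothetical format helper rather than the real logger:

const format = (value: unknown) => JSON.stringify(value);

describe('format', () => {
  it('keeps small snapshots inline', () => {
    // Short output (at most 10 lines) may stay inline in the test file.
    expect(format({a: 1})).toMatchInlineSnapshot(`"{\\"a\\":1}"`);
  });

  it('moves large snapshots to the .snap file', () => {
    // Larger output is stored in __snapshots__/<name>.snap via toMatchSnapshot().
    expect(Array.from({length: 50}, (_, i) => format({i}))).toMatchSnapshot();
  });
});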
@@ -27,8 +27,8 @@ function heading(label, id) {
);
}

describe('headings plugin', () => {
test('should patch `id`s and `data.hProperties.id', () => {
describe('headings remark plugin', () => {
it('patches `id`s and `data.hProperties.id', () => {
const result = process('# Normal\n\n## Table of Contents\n\n# Baz\n');
const expected = u('root', [
u(

@@ -55,7 +55,7 @@ describe('headings plugin', () => {
expect(result).toEqual(expected);
});

test('should not overwrite `data` on headings', () => {
it('does not overwrite `data` on headings', () => {
const result = process('# Normal\n', [
() => {
function transform(tree) {

@@ -78,7 +78,7 @@ describe('headings plugin', () => {
expect(result).toEqual(expected);
});

test('should not overwrite `data.hProperties` on headings', () => {
it('does not overwrite `data.hProperties` on headings', () => {
const result = process('# Normal\n', [
() => {
function transform(tree) {

@@ -101,7 +101,7 @@ describe('headings plugin', () => {
expect(result).toEqual(expected);
});

test('should generate `id`s and `hProperties.id`s, based on `hProperties.id` if they exist', () => {
it('generates `id`s and `hProperties.id`s, based on `hProperties.id` if they exist', () => {
const result = process(
[
'## Something',

@@ -157,7 +157,7 @@ describe('headings plugin', () => {
expect(result).toEqual(expected);
});

test('should create GitHub-style headings ids', () => {
it('creates GitHub-style headings ids', () => {
const result = process(
[
'## I ♥ unicode',

@@ -225,7 +225,7 @@ describe('headings plugin', () => {
expect(result).toEqual(expected);
});

test('should generate id from only text contents of headings if they contains HTML tags', () => {
it('generates id from only text contents of headings if they contains HTML tags', () => {
const result = process('# <span class="normal-header">Normal</span>\n');
const expected = u('root', [
u(

@@ -245,7 +245,7 @@ describe('headings plugin', () => {
expect(result).toEqual(expected);
});

test('should create custom headings ids', () => {
it('creates custom headings ids', () => {
const result = process(`
# Heading One {#custom_h1}
@ -1,6 +1,6 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`inline code should be escaped 1`] = `
|
||||
exports[`toc remark plugin escapes inline code 1`] = `
|
||||
"export const toc = [
|
||||
{
|
||||
value: '<code><Head /></code>',
|
||||
|
|
@ -48,7 +48,120 @@ exports[`inline code should be escaped 1`] = `
|
|||
"
|
||||
`;
|
||||
|
||||
exports[`non text phrasing content 1`] = `
|
||||
exports[`toc remark plugin exports even with existing name 1`] = `
|
||||
"export const toc = [
|
||||
{
|
||||
value: 'Thanos',
|
||||
id: 'thanos',
|
||||
level: 2
|
||||
},
|
||||
{
|
||||
value: 'Tony Stark',
|
||||
id: 'tony-stark',
|
||||
level: 2
|
||||
},
|
||||
{
|
||||
value: 'Avengers',
|
||||
id: 'avengers',
|
||||
level: 3
|
||||
}
|
||||
];
|
||||
|
||||
## Thanos
|
||||
|
||||
## Tony Stark
|
||||
|
||||
### Avengers
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`toc remark plugin exports with custom name 1`] = `
|
||||
"export const customName = [
|
||||
{
|
||||
value: 'Endi',
|
||||
id: 'endi',
|
||||
level: 3
|
||||
},
|
||||
{
|
||||
value: 'Endi',
|
||||
id: 'endi-1',
|
||||
level: 2
|
||||
},
|
||||
{
|
||||
value: 'Yangshun',
|
||||
id: 'yangshun',
|
||||
level: 3
|
||||
},
|
||||
{
|
||||
value: 'I ♥ unicode.',
|
||||
id: 'i--unicode',
|
||||
level: 2
|
||||
}
|
||||
];
|
||||
|
||||
### Endi
|
||||
|
||||
\`\`\`md
|
||||
## This is ignored
|
||||
\`\`\`
|
||||
|
||||
## Endi
|
||||
|
||||
Lorem ipsum
|
||||
|
||||
### Yangshun
|
||||
|
||||
Some content here
|
||||
|
||||
## I ♥ unicode.
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`toc remark plugin handles empty headings 1`] = `
|
||||
"export const toc = [];
|
||||
|
||||
# Ignore this
|
||||
|
||||
##
|
||||
|
||||
## 
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`toc remark plugin inserts below imports 1`] = `
|
||||
"import something from 'something';
|
||||
|
||||
import somethingElse from 'something-else';
|
||||
|
||||
export const toc = [
|
||||
{
|
||||
value: 'Title',
|
||||
id: 'title',
|
||||
level: 2
|
||||
},
|
||||
{
|
||||
value: 'Test',
|
||||
id: 'test',
|
||||
level: 2
|
||||
},
|
||||
{
|
||||
value: 'Again',
|
||||
id: 'again',
|
||||
level: 3
|
||||
}
|
||||
];
|
||||
|
||||
## Title
|
||||
|
||||
## Test
|
||||
|
||||
### Again
|
||||
|
||||
Content.
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`toc remark plugin works on non text phrasing content 1`] = `
|
||||
"export const toc = [
|
||||
{
|
||||
value: '<em>Emphasis</em>',
|
||||
|
|
@ -88,3 +201,45 @@ exports[`non text phrasing content 1`] = `
|
|||
## \`inline.code()\`
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`toc remark plugin works on text content 1`] = `
|
||||
"export const toc = [
|
||||
{
|
||||
value: 'Endi',
|
||||
id: 'endi',
|
||||
level: 3
|
||||
},
|
||||
{
|
||||
value: 'Endi',
|
||||
id: 'endi-1',
|
||||
level: 2
|
||||
},
|
||||
{
|
||||
value: 'Yangshun',
|
||||
id: 'yangshun',
|
||||
level: 3
|
||||
},
|
||||
{
|
||||
value: 'I ♥ unicode.',
|
||||
id: 'i--unicode',
|
||||
level: 2
|
||||
}
|
||||
];
|
||||
|
||||
### Endi
|
||||
|
||||
\`\`\`md
|
||||
## This is ignored
|
||||
\`\`\`
|
||||
|
||||
## Endi
|
||||
|
||||
Lorem ipsum
|
||||
|
||||
### Yangshun
|
||||
|
||||
Some content here
|
||||
|
||||
## I ♥ unicode.
|
||||
"
|
||||
`;
|
||||
|
|
|
|||
|
|
@ -24,185 +24,42 @@ const processFixture = async (name, options?) => {
|
|||
return result.toString();
|
||||
};
|
||||
|
||||
test('non text phrasing content', async () => {
|
||||
const result = await processFixture('non-text-content');
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('inline code should be escaped', async () => {
|
||||
const result = await processFixture('inline-code');
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('text content', async () => {
|
||||
const result = await processFixture('just-content');
|
||||
expect(result).toMatchInlineSnapshot(`
|
||||
"export const toc = [
|
||||
{
|
||||
value: 'Endi',
|
||||
id: 'endi',
|
||||
level: 3
|
||||
},
|
||||
{
|
||||
value: 'Endi',
|
||||
id: 'endi-1',
|
||||
level: 2
|
||||
},
|
||||
{
|
||||
value: 'Yangshun',
|
||||
id: 'yangshun',
|
||||
level: 3
|
||||
},
|
||||
{
|
||||
value: 'I ♥ unicode.',
|
||||
id: 'i--unicode',
|
||||
level: 2
|
||||
}
|
||||
];
|
||||
|
||||
### Endi
|
||||
|
||||
\`\`\`md
|
||||
## This is ignored
|
||||
\`\`\`
|
||||
|
||||
## Endi
|
||||
|
||||
Lorem ipsum
|
||||
|
||||
### Yangshun
|
||||
|
||||
Some content here
|
||||
|
||||
## I ♥ unicode.
|
||||
"
|
||||
`);
|
||||
});
|
||||
|
||||
test('should export even with existing name', async () => {
|
||||
const result = await processFixture('name-exist');
|
||||
expect(result).toMatchInlineSnapshot(`
|
||||
"export const toc = [
|
||||
{
|
||||
value: 'Thanos',
|
||||
id: 'thanos',
|
||||
level: 2
|
||||
},
|
||||
{
|
||||
value: 'Tony Stark',
|
||||
id: 'tony-stark',
|
||||
level: 2
|
||||
},
|
||||
{
|
||||
value: 'Avengers',
|
||||
id: 'avengers',
|
||||
level: 3
|
||||
}
|
||||
];
|
||||
|
||||
## Thanos
|
||||
|
||||
## Tony Stark
|
||||
|
||||
### Avengers
|
||||
"
|
||||
`);
|
||||
});
|
||||
|
||||
test('should export with custom name', async () => {
|
||||
const options = {
|
||||
name: 'customName',
|
||||
};
|
||||
const result = await processFixture('just-content', options);
|
||||
expect(result).toMatchInlineSnapshot(`
|
||||
"export const customName = [
|
||||
{
|
||||
value: 'Endi',
|
||||
id: 'endi',
|
||||
level: 3
|
||||
},
|
||||
{
|
||||
value: 'Endi',
|
||||
id: 'endi-1',
|
||||
level: 2
|
||||
},
|
||||
{
|
||||
value: 'Yangshun',
|
||||
id: 'yangshun',
|
||||
level: 3
|
||||
},
|
||||
{
|
||||
value: 'I ♥ unicode.',
|
||||
id: 'i--unicode',
|
||||
level: 2
|
||||
}
|
||||
];
|
||||
|
||||
### Endi
|
||||
|
||||
\`\`\`md
|
||||
## This is ignored
|
||||
\`\`\`
|
||||
|
||||
## Endi
|
||||
|
||||
Lorem ipsum
|
||||
|
||||
### Yangshun
|
||||
|
||||
Some content here
|
||||
|
||||
## I ♥ unicode.
|
||||
"
|
||||
`);
|
||||
});
|
||||
|
||||
test('should insert below imports', async () => {
|
||||
const result = await processFixture('insert-below-imports');
|
||||
expect(result).toMatchInlineSnapshot(`
|
||||
"import something from 'something';
|
||||
|
||||
import somethingElse from 'something-else';
|
||||
|
||||
export const toc = [
|
||||
{
|
||||
value: 'Title',
|
||||
id: 'title',
|
||||
level: 2
|
||||
},
|
||||
{
|
||||
value: 'Test',
|
||||
id: 'test',
|
||||
level: 2
|
||||
},
|
||||
{
|
||||
value: 'Again',
|
||||
id: 'again',
|
||||
level: 3
|
||||
}
|
||||
];
|
||||
|
||||
## Title
|
||||
|
||||
## Test
|
||||
|
||||
### Again
|
||||
|
||||
Content.
|
||||
"
|
||||
`);
|
||||
});
|
||||
|
||||
test('empty headings', async () => {
|
||||
const result = await processFixture('empty-headings');
|
||||
expect(result).toMatchInlineSnapshot(`
|
||||
"export const toc = [];
|
||||
|
||||
# Ignore this
|
||||
|
||||
##
|
||||
|
||||
## 
|
||||
"
|
||||
`);
|
||||
describe('toc remark plugin', () => {
|
||||
it('works on non text phrasing content', async () => {
|
||||
const result = await processFixture('non-text-content');
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('escapes inline code', async () => {
|
||||
const result = await processFixture('inline-code');
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('works on text content', async () => {
|
||||
const result = await processFixture('just-content');
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('exports even with existing name', async () => {
|
||||
const result = await processFixture('name-exist');
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('exports with custom name', async () => {
|
||||
const options = {
|
||||
name: 'customName',
|
||||
};
|
||||
const result = await processFixture('just-content', options);
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('inserts below imports', async () => {
|
||||
const result = await processFixture('insert-below-imports');
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('handles empty headings', async () => {
|
||||
const result = await processFixture('empty-headings');
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@@ -33,34 +33,34 @@ const staticDirs = [
const siteDir = path.join(__dirname, '__fixtures__');

describe('transformImage plugin', () => {
test('fail if image does not exist', async () => {
it('fail if image does not exist', async () => {
await expect(
processFixture('fail', {staticDirs}),
).rejects.toThrowErrorMatchingSnapshot();
});
test('fail if image relative path does not exist', async () => {
it('fail if image relative path does not exist', async () => {
await expect(
processFixture('fail2', {staticDirs}),
).rejects.toThrowErrorMatchingSnapshot();
});
test('fail if image url is absent', async () => {
it('fail if image url is absent', async () => {
await expect(
processFixture('noUrl', {staticDirs}),
).rejects.toThrowErrorMatchingSnapshot();
});

test('transform md images to <img />', async () => {
it('transform md images to <img />', async () => {
const result = await processFixture('img', {staticDirs, siteDir});
expect(result).toMatchSnapshot();
});

test('pathname protocol', async () => {
it('pathname protocol', async () => {
const result = await processFixture('pathname', {staticDirs});
expect(result).toMatchSnapshot();
});

test('does not choke on invalid image', async () => {
const errorMock = jest.spyOn(console, 'warn').mockImplementation();
it('does not choke on invalid image', async () => {
const errorMock = jest.spyOn(console, 'warn').mockImplementation(() => {});
const result = await processFixture('invalid-img', {staticDirs});
expect(result).toMatchSnapshot();
expect(errorMock).toBeCalledTimes(1);
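Several files in this commit change jest.spyOn(...).mockImplementation() to pass an explicit () => {} no-op, as in the hunk above. A small illustrative sketch of that spy pattern (the suite and the warning call are hypothetical, not Docusaurus code):

describe('a module that warns', () => {
  // Silence console.warn with an explicit no-op so test output stays clean.
  const warnMock = jest.spyOn(console, 'warn').mockImplementation(() => {});

  afterAll(() => {
    // Put the real console.warn back once the suite is done.
    warnMock.mockRestore();
  });

  it('warns exactly once on invalid input', () => {
    console.warn('invalid input'); // stand-in for the code under test
    expect(warnMock).toBeCalledTimes(1);
  });
});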
@@ -34,24 +34,24 @@ const processFixture = async (name: string, options?) => {
};

describe('transformAsset plugin', () => {
test('fail if asset url is absent', async () => {
it('fail if asset url is absent', async () => {
await expect(
processFixture('noUrl'),
).rejects.toThrowErrorMatchingSnapshot();
});

test('fail if asset with site alias does not exist', async () => {
it('fail if asset with site alias does not exist', async () => {
await expect(
processFixture('nonexistentSiteAlias'),
).rejects.toThrowErrorMatchingSnapshot();
});

test('transform md links to <a />', async () => {
it('transform md links to <a />', async () => {
const result = await processFixture('asset');
expect(result).toMatchSnapshot();
});

test('pathname protocol', async () => {
it('pathname protocol', async () => {
const result = await processFixture('pathname');
expect(result).toMatchSnapshot();
});
@@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`unwrapMdxCodeBlocks should unwrap the mdx code blocks 1`] = `
exports[`unwrapMdxCodeBlocks remark plugin unwraps the mdx code blocks 1`] = `
"# MDX code blocks test document

## Some basic markdown

@@ -95,7 +95,7 @@ cmd /C 'set \\"GIT_USER=<GITHUB_USERNAME>\\" && yarn deploy'
"
`;

exports[`unwrapMdxCodeBlocks should unwrap the mdx code blocks AST 1`] = `
exports[`unwrapMdxCodeBlocks remark plugin unwraps the mdx code blocks AST 1`] = `
Object {
"children": Array [
Object {
@@ -22,14 +22,14 @@ const processFixtureAST = async (name: string) => {
return remark().use(mdx).use(plugin).parse(file);
};

describe('unwrapMdxCodeBlocks', () => {
test('should unwrap the mdx code blocks', async () => {
describe('unwrapMdxCodeBlocks remark plugin', () => {
it('unwraps the mdx code blocks', async () => {
const result = await processFixture('has-mdx-code-blocks.mdx');
expect(result).toMatchSnapshot();
});

// The AST output should be parsed correctly or the MDX loader won't work!
test('should unwrap the mdx code blocks AST', async () => {
it('unwraps the mdx code blocks AST', async () => {
const result = await processFixtureAST('has-mdx-code-blocks.mdx');
expect(result).toMatchSnapshot();
});
@@ -7,8 +7,8 @@

import {shouldQuotifyFrontMatter} from '../frontMatter';

describe('frontMatter', () => {
test('shouldQuotifyFrontMatter', () => {
describe('shouldQuotifyFrontMatter', () => {
it('works', () => {
expect(shouldQuotifyFrontMatter(['id', 'value'])).toEqual(false);
expect(
shouldQuotifyFrontMatter([
@@ -12,10 +12,10 @@ import fs from 'fs-extra';
import {posixPath} from '@docusaurus/utils';

async function testMigration(siteDir: string, newDir: string) {
const writeMock = jest.spyOn(fs, 'outputFile').mockImplementation();
const mkdirpMock = jest.spyOn(fs, 'mkdirp').mockImplementation();
const mkdirsMock = jest.spyOn(fs, 'mkdirs').mockImplementation();
const copyMock = jest.spyOn(fs, 'copy').mockImplementation();
const writeMock = jest.spyOn(fs, 'outputFile').mockImplementation(() => {});
const mkdirpMock = jest.spyOn(fs, 'mkdirp').mockImplementation(() => {});
const mkdirsMock = jest.spyOn(fs, 'mkdirs').mockImplementation(() => {});
const copyMock = jest.spyOn(fs, 'copy').mockImplementation(() => {});
await migrateDocusaurusProject(siteDir, newDir, true, true);
expect(
writeMock.mock.calls.sort((a, b) =>

@@ -45,18 +45,18 @@ async function testMigration(siteDir: string, newDir: string) {

describe('migration test', () => {
const fixtureDir = path.join(__dirname, '__fixtures__');
test('simple website', async () => {
it('simple website', async () => {
const siteDir = path.join(fixtureDir, 'simple_website', 'website');
const newDir = path.join(fixtureDir, 'migrated_simple_site');
await testMigration(siteDir, newDir);
});
test('complex website', async () => {
it('complex website', async () => {
const siteDir = path.join(fixtureDir, 'complex_website', 'website');
const newDir = path.join(fixtureDir, 'migrated_complex_site');
await testMigration(siteDir, newDir);
});

test('missing versions', async () => {
it('missing versions', async () => {
const siteDir = path.join(fixtureDir, 'missing_version_website', 'website');
const newDir = path.join(fixtureDir, 'migrated_missing_version_site');
await testMigration(siteDir, newDir);
@@ -10,7 +10,7 @@ import {createConfigFile} from '../index';
import type {VersionOneConfig} from '../types';

describe('create config', () => {
test('simple test', () => {
it('simple test', () => {
const v1Config: VersionOneConfig = importFresh(
`${__dirname}/__fixtures__/sourceSiteConfig.js`,
);
@@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`collectRedirects should throw if plugin option redirects contain invalid to paths 1`] = `
exports[`collectRedirects throw if plugin option redirects contain invalid to paths 1`] = `
"You are trying to create client-side redirections to paths that do not exist:
- /this/path/does/not/exist2
- /this/path/does/not/exist2

@@ -12,13 +12,13 @@ Valid paths you can redirect to:
"
`;

exports[`collectRedirects should throw if redirect creator creates array of array redirect 1`] = `
exports[`collectRedirects throws if redirect creator creates array of array redirect 1`] = `
"Some created redirects are invalid:
- {\\"from\\":[\\"/fromPath\\"],\\"to\\":\\"/\\"} => Validation error: \\"from\\" must be a string
"
`;

exports[`collectRedirects should throw if redirect creator creates invalid redirects 1`] = `
exports[`collectRedirects throws if redirect creator creates invalid redirects 1`] = `
"Some created redirects are invalid:
- {\\"from\\":\\"https://google.com/\\",\\"to\\":\\"/\\"} => Validation error: \\"from\\" is not a valid pathname. Pathname should start with slash and not contain any domain or query string.
- {\\"from\\":\\"//abc\\",\\"to\\":\\"/\\"} => Validation error: \\"from\\" is not a valid pathname. Pathname should start with slash and not contain any domain or query string.
@@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`createRedirectPageContent should encode uri special chars 1`] = `
exports[`createRedirectPageContent encodes uri special chars 1`] = `
"<!DOCTYPE html>
<html>
<head>

@@ -14,7 +14,7 @@ exports[`createRedirectPageContent should encode uri special chars 1`] = `
</html>"
`;

exports[`createRedirectPageContent should match snapshot 1`] = `
exports[`createRedirectPageContent works 1`] = `
"<!DOCTYPE html>
<html>
<head>
@@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`normalizePluginOptions should reject bad createRedirects user inputs 1`] = `
exports[`normalizePluginOptions rejects bad createRedirects user inputs 1`] = `
"Invalid @docusaurus/plugin-client-redirects options: \\"createRedirects\\" must be of type function
{
\\"createRedirects\\": [

@@ -10,7 +10,7 @@ exports[`normalizePluginOptions should reject bad createRedirects user inputs 1`
}"
`;

exports[`normalizePluginOptions should reject bad fromExtensions user inputs 1`] = `
exports[`normalizePluginOptions rejects bad fromExtensions user inputs 1`] = `
"Invalid @docusaurus/plugin-client-redirects options: \\"fromExtensions[0]\\" contains an invalid value
{
\\"fromExtensions\\": [

@@ -22,7 +22,7 @@ exports[`normalizePluginOptions should reject bad fromExtensions user inputs 1`]
}"
`;

exports[`normalizePluginOptions should reject bad toExtensions user inputs 1`] = `
exports[`normalizePluginOptions rejects bad toExtensions user inputs 1`] = `
"Invalid @docusaurus/plugin-client-redirects options: \\"toExtensions[0]\\" contains an invalid value
{
\\"toExtensions\\": [
@ -1,6 +1,6 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`toRedirectFilesMetadata should create appropriate metadata for empty baseUrl: fileContent baseUrl=empty 1`] = `
|
||||
exports[`toRedirectFilesMetadata creates appropriate metadata for empty baseUrl: fileContent baseUrl=empty 1`] = `
|
||||
Array [
|
||||
"<!DOCTYPE html>
|
||||
<html>
|
||||
|
|
@ -16,7 +16,7 @@ Array [
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`toRedirectFilesMetadata should create appropriate metadata for root baseUrl: fileContent baseUrl=/ 1`] = `
|
||||
exports[`toRedirectFilesMetadata creates appropriate metadata for root baseUrl: fileContent baseUrl=/ 1`] = `
|
||||
Array [
|
||||
"<!DOCTYPE html>
|
||||
<html>
|
||||
|
|
@ -32,7 +32,7 @@ Array [
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`toRedirectFilesMetadata should create appropriate metadata trailingSlash=false: fileContent 1`] = `
|
||||
exports[`toRedirectFilesMetadata creates appropriate metadata trailingSlash=false: fileContent 1`] = `
|
||||
Array [
|
||||
"<!DOCTYPE html>
|
||||
<html>
|
||||
|
|
@ -70,7 +70,7 @@ Array [
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`toRedirectFilesMetadata should create appropriate metadata trailingSlash=true: fileContent 1`] = `
|
||||
exports[`toRedirectFilesMetadata creates appropriate metadata trailingSlash=true: fileContent 1`] = `
|
||||
Array [
|
||||
"<!DOCTYPE html>
|
||||
<html>
|
||||
|
|
@ -108,7 +108,7 @@ Array [
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`toRedirectFilesMetadata should create appropriate metadata trailingSlash=undefined: fileContent 1`] = `
|
||||
exports[`toRedirectFilesMetadata creates appropriate metadata trailingSlash=undefined: fileContent 1`] = `
|
||||
Array [
|
||||
"<!DOCTYPE html>
|
||||
<html>
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ function createTestPluginContext(
|
|||
}
|
||||
|
||||
describe('collectRedirects', () => {
|
||||
test('should collect no redirect for undefined config', () => {
|
||||
it('collects no redirect for undefined config', () => {
|
||||
expect(
|
||||
collectRedirects(
|
||||
createTestPluginContext(undefined, ['/', '/path']),
|
||||
|
|
@ -33,13 +33,13 @@ describe('collectRedirects', () => {
|
|||
).toEqual([]);
|
||||
});
|
||||
|
||||
test('should collect no redirect for empty config', () => {
|
||||
it('collects no redirect for empty config', () => {
|
||||
expect(collectRedirects(createTestPluginContext({}), undefined)).toEqual(
|
||||
[],
|
||||
);
|
||||
});
|
||||
|
||||
test('should collect redirects from html/exe extension', () => {
|
||||
it('collects redirects from html/exe extension', () => {
|
||||
expect(
|
||||
collectRedirects(
|
||||
createTestPluginContext(
|
||||
|
|
@ -62,7 +62,7 @@ describe('collectRedirects', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('should collect redirects to html/exe extension', () => {
|
||||
it('collects redirects to html/exe extension', () => {
|
||||
expect(
|
||||
collectRedirects(
|
||||
createTestPluginContext(
|
||||
|
|
@ -81,7 +81,7 @@ describe('collectRedirects', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('should collect redirects from plugin option redirects', () => {
|
||||
it('collects redirects from plugin option redirects', () => {
|
||||
expect(
|
||||
collectRedirects(
|
||||
createTestPluginContext(
|
||||
|
|
@ -117,7 +117,7 @@ describe('collectRedirects', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('should collect redirects from plugin option redirects with trailingSlash=true', () => {
|
||||
it('collects redirects from plugin option redirects with trailingSlash=true', () => {
|
||||
expect(
|
||||
collectRedirects(
|
||||
createTestPluginContext(
|
||||
|
|
@ -153,7 +153,7 @@ describe('collectRedirects', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('should collect redirects from plugin option redirects with trailingSlash=false', () => {
|
||||
it('collects redirects from plugin option redirects with trailingSlash=false', () => {
|
||||
expect(
|
||||
collectRedirects(
|
||||
createTestPluginContext(
|
||||
|
|
@ -189,7 +189,7 @@ describe('collectRedirects', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('should throw if plugin option redirects contain invalid to paths', () => {
|
||||
it('throw if plugin option redirects contain invalid to paths', () => {
|
||||
expect(() =>
|
||||
collectRedirects(
|
||||
createTestPluginContext(
|
||||
|
|
@ -216,7 +216,7 @@ describe('collectRedirects', () => {
|
|||
).toThrowErrorMatchingSnapshot();
|
||||
});
|
||||
|
||||
test('should collect redirects with custom redirect creator', () => {
|
||||
it('collects redirects with custom redirect creator', () => {
|
||||
expect(
|
||||
collectRedirects(
|
||||
createTestPluginContext(
|
||||
|
|
@ -260,7 +260,7 @@ describe('collectRedirects', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('should allow returning string / undefined', () => {
|
||||
it('allows returning string / undefined', () => {
|
||||
expect(
|
||||
collectRedirects(
|
||||
createTestPluginContext(
|
||||
|
|
@ -279,7 +279,7 @@ describe('collectRedirects', () => {
|
|||
).toEqual([{from: '/foo', to: '/'}]);
|
||||
});
|
||||
|
||||
test('should throw if redirect creator creates invalid redirects', () => {
|
||||
it('throws if redirect creator creates invalid redirects', () => {
|
||||
expect(() =>
|
||||
collectRedirects(
|
||||
createTestPluginContext(
|
||||
|
|
@ -302,7 +302,7 @@ describe('collectRedirects', () => {
|
|||
).toThrowErrorMatchingSnapshot();
|
||||
});
|
||||
|
||||
test('should throw if redirect creator creates array of array redirect', () => {
|
||||
it('throws if redirect creator creates array of array redirect', () => {
|
||||
expect(() =>
|
||||
collectRedirects(
|
||||
createTestPluginContext(
|
||||
|
|
@ -321,7 +321,7 @@ describe('collectRedirects', () => {
|
|||
).toThrowErrorMatchingSnapshot();
|
||||
});
|
||||
|
||||
test('should filter unwanted redirects', () => {
|
||||
it('filters unwanted redirects', () => {
|
||||
expect(
|
||||
collectRedirects(
|
||||
createTestPluginContext(
|
||||
|
|
|
|||
|
|
@ -8,13 +8,13 @@
|
|||
import createRedirectPageContent from '../createRedirectPageContent';
|
||||
|
||||
describe('createRedirectPageContent', () => {
|
||||
test('should match snapshot', () => {
|
||||
it('works', () => {
|
||||
expect(
|
||||
createRedirectPageContent({toUrl: 'https://docusaurus.io/'}),
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should encode uri special chars', () => {
|
||||
it('encodes uri special chars', () => {
|
||||
const result = createRedirectPageContent({
|
||||
toUrl: 'https://docusaurus.io/gr/σελιδας/',
|
||||
});
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ import {
|
|||
} from '../extensionRedirects';
|
||||
|
||||
describe('createToExtensionsRedirects', () => {
|
||||
test('should reject empty extensions', () => {
|
||||
it('rejects empty extensions', () => {
|
||||
expect(() => {
|
||||
createToExtensionsRedirects(['/'], ['']);
|
||||
}).toThrowErrorMatchingInlineSnapshot(`
|
||||
|
|
@ -20,7 +20,7 @@ describe('createToExtensionsRedirects', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('should reject extensions with .', () => {
|
||||
it('rejects extensions with "."', () => {
|
||||
expect(() => {
|
||||
createToExtensionsRedirects(['/'], ['.html']);
|
||||
}).toThrowErrorMatchingInlineSnapshot(`
|
||||
|
|
@ -29,7 +29,7 @@ describe('createToExtensionsRedirects', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('should reject extensions with /', () => {
|
||||
it('rejects extensions with /', () => {
|
||||
expect(() => {
|
||||
createToExtensionsRedirects(['/'], ['ht/ml']);
|
||||
}).toThrowErrorMatchingInlineSnapshot(`
|
||||
|
|
@ -38,7 +38,7 @@ describe('createToExtensionsRedirects', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('should reject extensions with illegal url char', () => {
|
||||
it('rejects extensions with illegal url char', () => {
|
||||
expect(() => {
|
||||
createToExtensionsRedirects(['/'], [',']);
|
||||
}).toThrowErrorMatchingInlineSnapshot(`
|
||||
|
|
@ -47,7 +47,7 @@ describe('createToExtensionsRedirects', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('should create redirects from html/htm extensions', () => {
|
||||
it('creates redirects from html/htm extensions', () => {
|
||||
const ext = ['html', 'htm'];
|
||||
expect(createToExtensionsRedirects([''], ext)).toEqual([]);
|
||||
expect(createToExtensionsRedirects(['/'], ext)).toEqual([]);
|
||||
|
|
@ -60,13 +60,13 @@ describe('createToExtensionsRedirects', () => {
|
|||
expect(createToExtensionsRedirects(['/abc.xyz'], ext)).toEqual([]);
|
||||
});
|
||||
|
||||
test('should create "to" redirects when relativeRoutesPath contains a prefix', () => {
|
||||
it('creates "to" redirects when relativeRoutesPath contains a prefix', () => {
|
||||
expect(
|
||||
createToExtensionsRedirects(['/prefix/file.html'], ['html']),
|
||||
).toEqual([{from: '/prefix/file', to: '/prefix/file.html'}]);
|
||||
});
|
||||
|
||||
test('should not create redirection for an empty extension array', () => {
|
||||
it('does not create redirection for an empty extension array', () => {
|
||||
const ext: string[] = [];
|
||||
expect(createToExtensionsRedirects([''], ext)).toEqual([]);
|
||||
expect(createToExtensionsRedirects(['/'], ext)).toEqual([]);
|
||||
|
|
@ -75,7 +75,7 @@ describe('createToExtensionsRedirects', () => {
|
|||
});
|
||||
|
||||
describe('createFromExtensionsRedirects', () => {
|
||||
test('should reject empty extensions', () => {
|
||||
it('rejects empty extensions', () => {
|
||||
expect(() => {
|
||||
createFromExtensionsRedirects(['/'], ['.html']);
|
||||
}).toThrowErrorMatchingInlineSnapshot(`
|
||||
|
|
@ -84,7 +84,7 @@ describe('createFromExtensionsRedirects', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('should reject extensions with .', () => {
|
||||
it('rejects extensions with "."', () => {
|
||||
expect(() => {
|
||||
createFromExtensionsRedirects(['/'], ['.html']);
|
||||
}).toThrowErrorMatchingInlineSnapshot(`
|
||||
|
|
@ -93,7 +93,7 @@ describe('createFromExtensionsRedirects', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('should reject extensions with /', () => {
|
||||
it('rejects extensions with /', () => {
|
||||
expect(() => {
|
||||
createFromExtensionsRedirects(['/'], ['ht/ml']);
|
||||
}).toThrowErrorMatchingInlineSnapshot(`
|
||||
|
|
@ -102,7 +102,7 @@ describe('createFromExtensionsRedirects', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('should reject extensions with illegal url char', () => {
|
||||
it('rejects extensions with illegal url char', () => {
|
||||
expect(() => {
|
||||
createFromExtensionsRedirects(['/'], [',']);
|
||||
}).toThrowErrorMatchingInlineSnapshot(`
|
||||
|
|
@ -111,7 +111,7 @@ describe('createFromExtensionsRedirects', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('should create redirects from html/htm extensions', () => {
|
||||
it('creates redirects from html/htm extensions', () => {
|
||||
const ext = ['html', 'htm'];
|
||||
expect(createFromExtensionsRedirects([''], ext)).toEqual([]);
|
||||
expect(createFromExtensionsRedirects(['/'], ext)).toEqual([]);
|
||||
|
|
@ -126,13 +126,13 @@ describe('createFromExtensionsRedirects', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('should create "from" redirects when relativeRoutesPath contains a prefix', () => {
|
||||
it('creates "from" redirects when relativeRoutesPath contains a prefix', () => {
|
||||
expect(createFromExtensionsRedirects(['/prefix/file'], ['html'])).toEqual([
|
||||
{from: '/prefix/file.html', to: '/prefix/file'},
|
||||
]);
|
||||
});
|
||||
|
||||
test('should not create redirection for an empty extension array', () => {
|
||||
it('does not create redirection for an empty extension array', () => {
|
||||
const ext: string[] = [];
|
||||
expect(createFromExtensionsRedirects([''], ext)).toEqual([]);
|
||||
expect(createFromExtensionsRedirects(['/'], ext)).toEqual([]);
|
||||
|
|
|
|||
|
|
@ -8,18 +8,18 @@
|
|||
import normalizePluginOptions, {
|
||||
DefaultPluginOptions,
|
||||
} from '../normalizePluginOptions';
|
||||
import type {CreateRedirectsFnOption} from '../types';
|
||||
import type {CreateRedirectsFnOption} from '@docusaurus/plugin-client-redirects';
|
||||
|
||||
describe('normalizePluginOptions', () => {
|
||||
test('should return default options for undefined user options', () => {
|
||||
it('returns default options for undefined user options', () => {
|
||||
expect(normalizePluginOptions()).toEqual(DefaultPluginOptions);
|
||||
});
|
||||
|
||||
test('should return default options for empty user options', () => {
|
||||
it('returns default options for empty user options', () => {
|
||||
expect(normalizePluginOptions()).toEqual(DefaultPluginOptions);
|
||||
});
|
||||
|
||||
test('should override one default options with valid user options', () => {
|
||||
it('overrides one default options with valid user options', () => {
|
||||
expect(
|
||||
normalizePluginOptions({
|
||||
toExtensions: ['html'],
|
||||
|
|
@ -27,7 +27,7 @@ describe('normalizePluginOptions', () => {
|
|||
).toEqual({...DefaultPluginOptions, toExtensions: ['html']});
|
||||
});
|
||||
|
||||
test('should override all default options with valid user options', () => {
|
||||
it('overrides all default options with valid user options', () => {
|
||||
const createRedirects: CreateRedirectsFnOption = (_routePath: string) => [];
|
||||
expect(
|
||||
normalizePluginOptions({
|
||||
|
|
@ -45,7 +45,7 @@ describe('normalizePluginOptions', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('should reject bad fromExtensions user inputs', () => {
|
||||
it('rejects bad fromExtensions user inputs', () => {
|
||||
expect(() =>
|
||||
normalizePluginOptions({
|
||||
fromExtensions: [null, undefined, 123, true] as unknown as string[],
|
||||
|
|
@ -53,7 +53,7 @@ describe('normalizePluginOptions', () => {
|
|||
).toThrowErrorMatchingSnapshot();
|
||||
});
|
||||
|
||||
test('should reject bad toExtensions user inputs', () => {
|
||||
it('rejects bad toExtensions user inputs', () => {
|
||||
expect(() =>
|
||||
normalizePluginOptions({
|
||||
toExtensions: [null, undefined, 123, true] as unknown as string[],
|
||||
|
|
@ -61,7 +61,7 @@ describe('normalizePluginOptions', () => {
|
|||
).toThrowErrorMatchingSnapshot();
|
||||
});
|
||||
|
||||
test('should reject bad createRedirects user inputs', () => {
|
||||
it('rejects bad createRedirects user inputs', () => {
|
||||
expect(() =>
|
||||
normalizePluginOptions({
|
||||
createRedirects: ['bad', 'value'] as unknown as CreateRedirectsFnOption,
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@
|
|||
import {validateRedirect} from '../redirectValidation';
|
||||
|
||||
describe('validateRedirect', () => {
|
||||
test('validate good redirects without throwing', () => {
|
||||
it('validate good redirects without throwing', () => {
|
||||
expect(() => {
|
||||
validateRedirect({
|
||||
from: '/fromSomePath',
|
||||
|
|
@ -29,7 +29,7 @@ describe('validateRedirect', () => {
|
|||
}).not.toThrow();
|
||||
});
|
||||
|
||||
test('throw for bad redirects', () => {
|
||||
it('throw for bad redirects', () => {
|
||||
expect(() =>
|
||||
validateRedirect({
|
||||
from: 'https://fb.com/fromSomePath',
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ import writeRedirectFiles, {
|
|||
// - https://github.com/facebook/docusaurus/issues/3886
|
||||
// - https://github.com/facebook/docusaurus/issues/3925
|
||||
describe('createToUrl', () => {
|
||||
test('should create appropriate redirect urls', async () => {
|
||||
it('creates appropriate redirect urls', async () => {
|
||||
expect(createToUrl('/', '/docs/something/else')).toEqual(
|
||||
'/docs/something/else',
|
||||
);
|
||||
|
|
@ -29,7 +29,7 @@ describe('createToUrl', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should create appropriate redirect urls with baseUrl', async () => {
|
||||
it('creates appropriate redirect urls with baseUrl', async () => {
|
||||
expect(createToUrl('/baseUrl/', '/docs/something/else')).toEqual(
|
||||
'/baseUrl/docs/something/else',
|
||||
);
|
||||
|
|
@ -43,7 +43,7 @@ describe('createToUrl', () => {
|
|||
});
|
||||
|
||||
describe('toRedirectFilesMetadata', () => {
|
||||
test('should create appropriate metadata trailingSlash=undefined', async () => {
|
||||
it('creates appropriate metadata trailingSlash=undefined', async () => {
|
||||
const pluginContext = {
|
||||
outDir: '/tmp/someFixedOutDir',
|
||||
baseUrl: 'https://docusaurus.io',
|
||||
|
|
@ -70,7 +70,7 @@ describe('toRedirectFilesMetadata', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should create appropriate metadata trailingSlash=true', async () => {
|
||||
it('creates appropriate metadata trailingSlash=true', async () => {
|
||||
const pluginContext = {
|
||||
outDir: '/tmp/someFixedOutDir',
|
||||
baseUrl: 'https://docusaurus.io',
|
||||
|
|
@ -97,7 +97,7 @@ describe('toRedirectFilesMetadata', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should create appropriate metadata trailingSlash=false', async () => {
|
||||
it('creates appropriate metadata trailingSlash=false', async () => {
|
||||
const pluginContext = {
|
||||
outDir: '/tmp/someFixedOutDir',
|
||||
baseUrl: 'https://docusaurus.io',
|
||||
|
|
@ -127,7 +127,7 @@ describe('toRedirectFilesMetadata', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should create appropriate metadata for root baseUrl', async () => {
|
||||
it('creates appropriate metadata for root baseUrl', async () => {
|
||||
const pluginContext = {
|
||||
outDir: '/tmp/someFixedOutDir',
|
||||
baseUrl: '/',
|
||||
|
|
@ -142,7 +142,7 @@ describe('toRedirectFilesMetadata', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should create appropriate metadata for empty baseUrl', async () => {
|
||||
it('creates appropriate metadata for empty baseUrl', async () => {
|
||||
const pluginContext = {
|
||||
outDir: '/tmp/someFixedOutDir',
|
||||
baseUrl: '',
|
||||
|
|
@ -159,7 +159,7 @@ describe('toRedirectFilesMetadata', () => {
|
|||
});
|
||||
|
||||
describe('writeRedirectFiles', () => {
|
||||
test('write the files', async () => {
|
||||
it('write the files', async () => {
|
||||
const outDir = `/tmp/docusaurus_tests_${Math.random()}`;
|
||||
|
||||
const filesMetadata = [
|
||||
|
|
@ -184,7 +184,7 @@ describe('writeRedirectFiles', () => {
|
|||
).resolves.toEqual('content 2');
|
||||
});
|
||||
|
||||
test('avoid overwriting existing files', async () => {
|
||||
it('avoid overwriting existing files', async () => {
|
||||
const outDir = `/tmp/docusaurus_tests_${Math.random()}`;
|
||||
|
||||
const filesMetadata = [
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`report broken markdown links 1`] = `
|
||||
exports[`linkify reports broken markdown links 1`] = `
|
||||
"---
|
||||
title: This post links to another one!
|
||||
---
|
||||
|
|
@ -15,7 +15,7 @@ title: This post links to another one!
|
|||
"
|
||||
`;
|
||||
|
||||
exports[`transform to correct link 1`] = `
|
||||
exports[`linkify transforms to correct link 1`] = `
|
||||
"---
|
||||
title: This post links to another one!
|
||||
---
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`blogFeed atom shows feed item for each post 1`] = `
|
||||
exports[`atom has feed item for each post 1`] = `
|
||||
Array [
|
||||
"<?xml version=\\"1.0\\" encoding=\\"utf-8\\"?>
|
||||
<feed xmlns=\\"http://www.w3.org/2005/Atom\\">
|
||||
|
|
@ -84,7 +84,7 @@ Array [
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`blogFeed json shows feed item for each post 1`] = `
|
||||
exports[`json has feed item for each post 1`] = `
|
||||
Array [
|
||||
"{
|
||||
\\"version\\": \\"https://jsonfeed.org/version/1\\",
|
||||
|
|
@ -171,7 +171,7 @@ Array [
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`blogFeed rss shows feed item for each post 1`] = `
|
||||
exports[`rss has feed item for each post 1`] = `
|
||||
Array [
|
||||
"<?xml version=\\"1.0\\" encoding=\\"utf-8\\"?>
|
||||
<rss version=\\"2.0\\" xmlns:dc=\\"http://purl.org/dc/elements/1.1/\\" xmlns:content=\\"http://purl.org/rss/1.0/modules/content/\\">
|
||||
|
|
|
|||
|
|
@ -1,6 +1,67 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`loadBlog test blog tags 1`] = `
|
||||
exports[`blog plugin works on blog tags without pagination 1`] = `
|
||||
Object {
|
||||
"/blog/tags/tag-1": Object {
|
||||
"items": Array [
|
||||
"/simple/slug/another",
|
||||
"/another/tags",
|
||||
"/another/tags2",
|
||||
],
|
||||
"name": "tag1",
|
||||
"pages": Array [
|
||||
Object {
|
||||
"items": Array [
|
||||
"/simple/slug/another",
|
||||
"/another/tags",
|
||||
"/another/tags2",
|
||||
],
|
||||
"metadata": Object {
|
||||
"blogDescription": "Blog",
|
||||
"blogTitle": "Blog",
|
||||
"nextPage": null,
|
||||
"page": 1,
|
||||
"permalink": "/blog/tags/tag-1",
|
||||
"postsPerPage": 3,
|
||||
"previousPage": null,
|
||||
"totalCount": 3,
|
||||
"totalPages": 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
"permalink": "/blog/tags/tag-1",
|
||||
},
|
||||
"/blog/tags/tag-2": Object {
|
||||
"items": Array [
|
||||
"/another/tags",
|
||||
"/another/tags2",
|
||||
],
|
||||
"name": "tag2",
|
||||
"pages": Array [
|
||||
Object {
|
||||
"items": Array [
|
||||
"/another/tags",
|
||||
"/another/tags2",
|
||||
],
|
||||
"metadata": Object {
|
||||
"blogDescription": "Blog",
|
||||
"blogTitle": "Blog",
|
||||
"nextPage": null,
|
||||
"page": 1,
|
||||
"permalink": "/blog/tags/tag-2",
|
||||
"postsPerPage": 2,
|
||||
"previousPage": null,
|
||||
"totalCount": 2,
|
||||
"totalPages": 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
"permalink": "/blog/tags/tag-2",
|
||||
},
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`blog plugin works with blog tags 1`] = `
|
||||
Object {
|
||||
"/blog/tags/tag-1": Object {
|
||||
"items": Array [
|
||||
|
|
@ -75,64 +136,3 @@ Object {
|
|||
},
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`loadBlog test blog tags: no pagination 1`] = `
|
||||
Object {
|
||||
"/blog/tags/tag-1": Object {
|
||||
"items": Array [
|
||||
"/simple/slug/another",
|
||||
"/another/tags",
|
||||
"/another/tags2",
|
||||
],
|
||||
"name": "tag1",
|
||||
"pages": Array [
|
||||
Object {
|
||||
"items": Array [
|
||||
"/simple/slug/another",
|
||||
"/another/tags",
|
||||
"/another/tags2",
|
||||
],
|
||||
"metadata": Object {
|
||||
"blogDescription": "Blog",
|
||||
"blogTitle": "Blog",
|
||||
"nextPage": null,
|
||||
"page": 1,
|
||||
"permalink": "/blog/tags/tag-1",
|
||||
"postsPerPage": 3,
|
||||
"previousPage": null,
|
||||
"totalCount": 3,
|
||||
"totalPages": 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
"permalink": "/blog/tags/tag-1",
|
||||
},
|
||||
"/blog/tags/tag-2": Object {
|
||||
"items": Array [
|
||||
"/another/tags",
|
||||
"/another/tags2",
|
||||
],
|
||||
"name": "tag2",
|
||||
"pages": Array [
|
||||
Object {
|
||||
"items": Array [
|
||||
"/another/tags",
|
||||
"/another/tags2",
|
||||
],
|
||||
"metadata": Object {
|
||||
"blogDescription": "Blog",
|
||||
"blogTitle": "Blog",
|
||||
"nextPage": null,
|
||||
"page": 1,
|
||||
"permalink": "/blog/tags/tag-2",
|
||||
"postsPerPage": 2,
|
||||
"previousPage": null,
|
||||
"totalCount": 2,
|
||||
"totalPages": 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
"permalink": "/blog/tags/tag-2",
|
||||
},
|
||||
}
|
||||
`;
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`should throw Error in case of invalid feedtype 1`] = `[ValidationError: "feedOptions.type" does not match any of the allowed types]`;
|
||||
exports[`blog plugin options schema throws Error in case of invalid feedtype 1`] = `[ValidationError: "feedOptions.type" does not match any of the allowed types]`;
|
||||
|
||||
exports[`should throw Error in case of invalid options 1`] = `[ValidationError: "postsPerPage" must be greater than or equal to 1]`;
|
||||
exports[`blog plugin options schema throws Error in case of invalid options 1`] = `[ValidationError: "postsPerPage" must be greater than or equal to 1]`;
|
||||
|
|
|
|||
|
|

@@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`getContentTranslationFiles should return translation files matching snapshot 1`] = `
exports[`getContentTranslationFiles returns translation files matching snapshot 1`] = `
Array [
Object {
"content": Object {
@@ -22,7 +22,7 @@ Array [
]
`;

exports[`translateContent should fallback when translation is incomplete 1`] = `
exports[`translateContent falls back when translation is incomplete 1`] = `
Object {
"blogListPaginated": Array [
Object {
@@ -63,7 +63,7 @@ Object {
}
`;

exports[`translateContent should return translated loaded content matching snapshot 1`] = `
exports[`translateContent returns translated loaded 1`] = `
Object {
"blogListPaginated": Array [
Object {

@ -14,7 +14,7 @@ import {
|
|||
import path from 'path';
|
||||
|
||||
describe('getBlogPostAuthors', () => {
|
||||
test('can read no authors', () => {
|
||||
it('can read no authors', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {},
|
||||
|
|
@ -31,7 +31,7 @@ describe('getBlogPostAuthors', () => {
|
|||
).toEqual([]);
|
||||
});
|
||||
|
||||
test('can read author from legacy front matter', () => {
|
||||
it('can read author from legacy front matter', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
|
@ -79,7 +79,7 @@ describe('getBlogPostAuthors', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('can read authors string', () => {
|
||||
it('can read authors string', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
|
@ -90,7 +90,7 @@ describe('getBlogPostAuthors', () => {
|
|||
).toEqual([{key: 'slorber', name: 'Sébastien Lorber'}]);
|
||||
});
|
||||
|
||||
test('can read authors string[]', () => {
|
||||
it('can read authors string[]', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
|
@ -107,7 +107,7 @@ describe('getBlogPostAuthors', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('can read authors Author', () => {
|
||||
it('can read authors Author', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
|
@ -118,7 +118,7 @@ describe('getBlogPostAuthors', () => {
|
|||
).toEqual([{name: 'Sébastien Lorber', title: 'maintainer'}]);
|
||||
});
|
||||
|
||||
test('can read authors Author[]', () => {
|
||||
it('can read authors Author[]', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
|
@ -135,7 +135,7 @@ describe('getBlogPostAuthors', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('can read authors complex (string | Author)[] setup with keys and local overrides', () => {
|
||||
it('can read authors complex (string | Author)[] setup with keys and local overrides', () => {
|
||||
expect(
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
|
@ -166,7 +166,7 @@ describe('getBlogPostAuthors', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('throw when using author key with no authorsMap', () => {
|
||||
it('throw when using author key with no authorsMap', () => {
|
||||
expect(() =>
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
|
@ -180,7 +180,7 @@ describe('getBlogPostAuthors', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('throw when using author key with empty authorsMap', () => {
|
||||
it('throw when using author key with empty authorsMap', () => {
|
||||
expect(() =>
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
|
@ -194,7 +194,7 @@ describe('getBlogPostAuthors', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('throw when using bad author key in string', () => {
|
||||
it('throw when using bad author key in string', () => {
|
||||
expect(() =>
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
|
@ -213,7 +213,7 @@ describe('getBlogPostAuthors', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('throw when using bad author key in string[]', () => {
|
||||
it('throw when using bad author key in string[]', () => {
|
||||
expect(() =>
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
|
@ -232,7 +232,7 @@ describe('getBlogPostAuthors', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('throw when using bad author key in Author[].key', () => {
|
||||
it('throw when using bad author key in Author[].key', () => {
|
||||
expect(() =>
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
|
@ -251,7 +251,7 @@ describe('getBlogPostAuthors', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('throw when mixing legacy/new authors front matter', () => {
|
||||
it('throw when mixing legacy/new authors front matter', () => {
|
||||
expect(() =>
|
||||
getBlogPostAuthors({
|
||||
frontMatter: {
|
||||
|
|
@ -287,7 +287,7 @@ describe('getAuthorsMap', () => {
|
|||
contentPath: fixturesDir,
|
||||
};
|
||||
|
||||
test('getAuthorsMap can read yml file', async () => {
|
||||
it('getAuthorsMap can read yml file', async () => {
|
||||
await expect(
|
||||
getAuthorsMap({
|
||||
contentPaths,
|
||||
|
|
@ -296,7 +296,7 @@ describe('getAuthorsMap', () => {
|
|||
).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
test('getAuthorsMap can read json file', async () => {
|
||||
it('getAuthorsMap can read json file', async () => {
|
||||
await expect(
|
||||
getAuthorsMap({
|
||||
contentPaths,
|
||||
|
|
@ -305,7 +305,7 @@ describe('getAuthorsMap', () => {
|
|||
).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
test('getAuthorsMap can return undefined if yaml file not found', async () => {
|
||||
it('getAuthorsMap can return undefined if yaml file not found', async () => {
|
||||
await expect(
|
||||
getAuthorsMap({
|
||||
contentPaths,
|
||||
|
|
@ -316,7 +316,7 @@ describe('getAuthorsMap', () => {
|
|||
});
|
||||
|
||||
describe('validateAuthorsMap', () => {
|
||||
test('accept valid authors map', () => {
|
||||
it('accept valid authors map', () => {
|
||||
const authorsMap: AuthorsMap = {
|
||||
slorber: {
|
||||
name: 'Sébastien Lorber',
|
||||
|
|
@ -338,7 +338,7 @@ describe('validateAuthorsMap', () => {
|
|||
expect(validateAuthorsMap(authorsMap)).toEqual(authorsMap);
|
||||
});
|
||||
|
||||
test('rename snake case image_url to camelCase imageURL', () => {
|
||||
it('rename snake case image_url to camelCase imageURL', () => {
|
||||
const authorsMap: AuthorsMap = {
|
||||
slorber: {
|
||||
name: 'Sébastien Lorber',
|
||||
|
|
@ -353,7 +353,7 @@ describe('validateAuthorsMap', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('accept author with only image', () => {
|
||||
it('accept author with only image', () => {
|
||||
const authorsMap: AuthorsMap = {
|
||||
slorber: {
|
||||
imageURL: 'https://github.com/slorber.png',
|
||||
|
|
@ -363,7 +363,7 @@ describe('validateAuthorsMap', () => {
|
|||
expect(validateAuthorsMap(authorsMap)).toEqual(authorsMap);
|
||||
});
|
||||
|
||||
test('reject author without name or image', () => {
|
||||
it('reject author without name or image', () => {
|
||||
const authorsMap: AuthorsMap = {
|
||||
slorber: {
|
||||
title: 'foo',
|
||||
|
|
@ -376,7 +376,7 @@ describe('validateAuthorsMap', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('reject undefined author', () => {
|
||||
it('reject undefined author', () => {
|
||||
expect(() =>
|
||||
validateAuthorsMap({
|
||||
slorber: undefined,
|
||||
|
|
@ -384,7 +384,7 @@ describe('validateAuthorsMap', () => {
|
|||
).toThrowErrorMatchingInlineSnapshot(`"\\"slorber\\" is required"`);
|
||||
});
|
||||
|
||||
test('reject null author', () => {
|
||||
it('reject null author', () => {
|
||||
expect(() =>
|
||||
validateAuthorsMap({
|
||||
slorber: null,
|
||||
|
|
@ -394,7 +394,7 @@ describe('validateAuthorsMap', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('reject array author', () => {
|
||||
it('reject array author', () => {
|
||||
expect(() =>
|
||||
validateAuthorsMap({slorber: []}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
|
|
@ -402,14 +402,14 @@ describe('validateAuthorsMap', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('reject array content', () => {
|
||||
it('reject array content', () => {
|
||||
expect(() => validateAuthorsMap([])).toThrowErrorMatchingInlineSnapshot(
|
||||
// TODO improve this error message
|
||||
`"\\"value\\" must be of type object"`,
|
||||
);
|
||||
});
|
||||
|
||||
test('reject flat author', () => {
|
||||
it('reject flat author', () => {
|
||||
expect(() =>
|
||||
validateAuthorsMap({name: 'Sébastien'}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
|
|
@ -418,7 +418,7 @@ describe('validateAuthorsMap', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('reject non-map author', () => {
|
||||
it('reject non-map author', () => {
|
||||
const authorsMap: AuthorsMap = {
|
||||
// @ts-expect-error: for tests
|
||||
slorber: [],
|
||||
@ -24,13 +24,13 @@ function testField(params: {
|
|||
][];
|
||||
}) {
|
||||
describe(`"${params.fieldName}" field`, () => {
|
||||
test('accept valid values', () => {
|
||||
it('accept valid values', () => {
|
||||
params.validFrontMatters.forEach((frontMatter) => {
|
||||
expect(validateBlogPostFrontMatter(frontMatter)).toEqual(frontMatter);
|
||||
});
|
||||
});
|
||||
|
||||
test('convert valid values', () => {
|
||||
it('convert valid values', () => {
|
||||
params.convertibleFrontMatter?.forEach(
|
||||
([convertibleFrontMatter, convertedFrontMatter]) => {
|
||||
expect(validateBlogPostFrontMatter(convertibleFrontMatter)).toEqual(
|
||||
|
|
@ -40,7 +40,7 @@ function testField(params: {
|
|||
);
|
||||
});
|
||||
|
||||
test('throw error for values', () => {
|
||||
it('throw error for values', () => {
|
||||
params.invalidFrontMatters?.forEach(([frontMatter, message]) => {
|
||||
try {
|
||||
validateBlogPostFrontMatter(frontMatter);
|
||||
|
|
@ -64,12 +64,12 @@ function testField(params: {
|
|||
}
|
||||
|
||||
describe('validateBlogPostFrontMatter', () => {
|
||||
test('accept empty object', () => {
|
||||
it('accept empty object', () => {
|
||||
const frontMatter = {};
|
||||
expect(validateBlogPostFrontMatter(frontMatter)).toEqual(frontMatter);
|
||||
});
|
||||
|
||||
test('accept unknown field', () => {
|
||||
it('accept unknown field', () => {
|
||||
const frontMatter = {abc: '1'};
|
||||
expect(validateBlogPostFrontMatter(frontMatter)).toEqual(frontMatter);
|
||||
});
|
||||
|
|
@ -106,7 +106,7 @@ describe('validateBlogPostFrontMatter id', () => {
|
|||
});
|
||||
|
||||
describe('validateBlogPostFrontMatter handles legacy/new author front matter', () => {
|
||||
test('allow legacy author front matter', () => {
|
||||
it('allow legacy author front matter', () => {
|
||||
const frontMatter: BlogPostFrontMatter = {
|
||||
author: 'Sebastien',
|
||||
author_url: 'https://sebastienlorber.com',
|
||||
|
|
@ -116,7 +116,7 @@ describe('validateBlogPostFrontMatter handles legacy/new author front matter', (
|
|||
expect(validateBlogPostFrontMatter(frontMatter)).toEqual(frontMatter);
|
||||
});
|
||||
|
||||
test('allow new authors front matter', () => {
|
||||
it('allow new authors front matter', () => {
|
||||
const frontMatter: BlogPostFrontMatter = {
|
||||
authors: [
|
||||
'slorber',
|
||||
|
|
|
|||
|
|
@ -5,10 +5,74 @@
|
|||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import {truncate, parseBlogFileName} from '../blogUtils';
|
||||
import {jest} from '@jest/globals';
|
||||
import {
|
||||
truncate,
|
||||
parseBlogFileName,
|
||||
linkify,
|
||||
getSourceToPermalink,
|
||||
type LinkifyParams,
|
||||
} from '../blogUtils';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'path';
|
||||
import type {
|
||||
BlogBrokenMarkdownLink,
|
||||
BlogContentPaths,
|
||||
BlogPost,
|
||||
} from '../types';
|
||||
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const contentPaths: BlogContentPaths = {
|
||||
contentPath: path.join(siteDir, 'blog-with-ref'),
|
||||
contentPathLocalized: path.join(siteDir, 'blog-with-ref-localized'),
|
||||
};
|
||||
const pluginDir = 'blog-with-ref';
|
||||
const blogPosts: BlogPost[] = [
|
||||
{
|
||||
id: 'Happy 1st Birthday Slash!',
|
||||
metadata: {
|
||||
permalink: '/blog/2018/12/14/Happy-First-Birthday-Slash',
|
||||
source: path.posix.join(
|
||||
'@site',
|
||||
pluginDir,
|
||||
'2018-12-14-Happy-First-Birthday-Slash.md',
|
||||
),
|
||||
title: 'Happy 1st Birthday Slash!',
|
||||
description: `pattern name`,
|
||||
date: new Date('2018-12-14'),
|
||||
tags: [],
|
||||
prevItem: {
|
||||
permalink: '/blog/2019/01/01/date-matter',
|
||||
title: 'date-matter',
|
||||
},
|
||||
truncated: false,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const transform = async (
|
||||
filePath: string,
|
||||
options?: Partial<LinkifyParams>,
|
||||
) => {
|
||||
const fileContent = await fs.readFile(filePath, 'utf-8');
|
||||
const transformedContent = linkify({
|
||||
filePath,
|
||||
fileString: fileContent,
|
||||
siteDir,
|
||||
contentPaths,
|
||||
sourceToPermalink: getSourceToPermalink(blogPosts),
|
||||
onBrokenMarkdownLink: (brokenMarkdownLink) => {
|
||||
throw new Error(
|
||||
`Broken markdown link found: ${JSON.stringify(brokenMarkdownLink)}`,
|
||||
);
|
||||
},
|
||||
...options,
|
||||
});
|
||||
return [fileContent, transformedContent];
|
||||
};
|
||||
|
||||
describe('truncate', () => {
|
||||
test('truncates texts', () => {
|
||||
it('truncates texts', () => {
|
||||
expect(
|
||||
truncate('aaa\n<!-- truncate -->\nbbb\nccc', /<!-- truncate -->/),
|
||||
).toEqual('aaa\n');
|
||||
|
|
@ -16,7 +80,8 @@ describe('truncate', () => {
|
|||
truncate('\n<!-- truncate -->\nbbb\nccc', /<!-- truncate -->/),
|
||||
).toEqual('\n');
|
||||
});
|
||||
test('leaves texts without markers', () => {
|
||||
|
||||
it('leaves texts without markers', () => {
|
||||
expect(truncate('aaa\nbbb\nccc', /<!-- truncate -->/)).toEqual(
|
||||
'aaa\nbbb\nccc',
|
||||
);
|
||||
|
|
@ -25,7 +90,7 @@ describe('truncate', () => {
|
|||
});
|
||||
|
||||
describe('parseBlogFileName', () => {
|
||||
test('parse file', () => {
|
||||
it('parses file', () => {
|
||||
expect(parseBlogFileName('some-post.md')).toEqual({
|
||||
date: undefined,
|
||||
text: 'some-post',
|
||||
|
|
@ -33,7 +98,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse folder', () => {
|
||||
it('parses folder', () => {
|
||||
expect(parseBlogFileName('some-post/index.md')).toEqual({
|
||||
date: undefined,
|
||||
text: 'some-post',
|
||||
|
|
@ -41,7 +106,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse nested file', () => {
|
||||
it('parses nested file', () => {
|
||||
expect(parseBlogFileName('some-post/some-file.md')).toEqual({
|
||||
date: undefined,
|
||||
text: 'some-post/some-file',
|
||||
|
|
@ -49,7 +114,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse nested folder', () => {
|
||||
it('parses nested folder', () => {
|
||||
expect(parseBlogFileName('some-post/some-subfolder/index.md')).toEqual({
|
||||
date: undefined,
|
||||
text: 'some-post/some-subfolder',
|
||||
|
|
@ -57,7 +122,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse file respecting date convention', () => {
|
||||
it('parses file respecting date convention', () => {
|
||||
expect(
|
||||
parseBlogFileName('2021-05-12-announcing-docusaurus-two-beta.md'),
|
||||
).toEqual({
|
||||
|
|
@ -67,7 +132,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse folder name respecting date convention', () => {
|
||||
it('parses folder name respecting date convention', () => {
|
||||
expect(
|
||||
parseBlogFileName('2021-05-12-announcing-docusaurus-two-beta/index.md'),
|
||||
).toEqual({
|
||||
|
|
@ -77,7 +142,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse folder tree respecting date convention', () => {
|
||||
it('parses folder tree respecting date convention', () => {
|
||||
expect(
|
||||
parseBlogFileName('2021/05/12/announcing-docusaurus-two-beta/index.md'),
|
||||
).toEqual({
|
||||
|
|
@ -87,7 +152,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse folder name/tree (mixed) respecting date convention', () => {
|
||||
it('parses folder name/tree (mixed) respecting date convention', () => {
|
||||
expect(
|
||||
parseBlogFileName('2021/05-12-announcing-docusaurus-two-beta/index.md'),
|
||||
).toEqual({
|
||||
|
|
@ -97,7 +162,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse nested folder tree respecting date convention', () => {
|
||||
it('parses nested folder tree respecting date convention', () => {
|
||||
expect(
|
||||
parseBlogFileName(
|
||||
'2021/05/12/announcing-docusaurus-two-beta/subfolder/subfile.md',
|
||||
|
|
@ -109,7 +174,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse date in the middle of path', () => {
|
||||
it('parses date in the middle of path', () => {
|
||||
expect(
|
||||
parseBlogFileName('team-a/2021/05/12/announcing-docusaurus-two-beta.md'),
|
||||
).toEqual({
|
||||
|
|
@ -119,7 +184,7 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('parse date in the middle of a folder name', () => {
|
||||
it('parses date in the middle of a folder name', () => {
|
||||
expect(
|
||||
parseBlogFileName(
|
||||
'team-a-2021-05-12-hey/announcing-docusaurus-two-beta.md',
|
||||
|
|
@ -131,3 +196,40 @@ describe('parseBlogFileName', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('linkify', () => {
|
||||
it('transforms to correct link', async () => {
|
||||
const post = path.join(contentPaths.contentPath, 'post.md');
|
||||
const [content, transformedContent] = await transform(post);
|
||||
expect(transformedContent).toMatchSnapshot();
|
||||
expect(transformedContent).toContain(
|
||||
'](/blog/2018/12/14/Happy-First-Birthday-Slash',
|
||||
);
|
||||
expect(transformedContent).not.toContain(
|
||||
'](2018-12-14-Happy-First-Birthday-Slash.md)',
|
||||
);
|
||||
expect(content).not.toEqual(transformedContent);
|
||||
});
|
||||
|
||||
it('reports broken markdown links', async () => {
|
||||
const filePath = 'post-with-broken-links.md';
|
||||
const folderPath = contentPaths.contentPath;
|
||||
const postWithBrokenLinks = path.join(folderPath, filePath);
|
||||
const onBrokenMarkdownLink = jest.fn();
|
||||
const [, transformedContent] = await transform(postWithBrokenLinks, {
|
||||
onBrokenMarkdownLink,
|
||||
});
|
||||
expect(transformedContent).toMatchSnapshot();
|
||||
expect(onBrokenMarkdownLink).toHaveBeenCalledTimes(2);
|
||||
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(1, {
|
||||
filePath: path.resolve(folderPath, filePath),
|
||||
contentPaths,
|
||||
link: 'postNotExist1.md',
|
||||
} as BlogBrokenMarkdownLink);
|
||||
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(2, {
|
||||
filePath: path.resolve(folderPath, filePath),
|
||||
contentPaths,
|
||||
link: './postNotExist2.mdx',
|
||||
} as BlogBrokenMarkdownLink);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -52,85 +52,81 @@ async function testGenerateFeeds(
|
|||
});
|
||||
}
|
||||
|
||||
describe('blogFeed', () => {
|
||||
(['atom', 'rss', 'json'] as const).forEach((feedType) => {
|
||||
describe(`${feedType}`, () => {
|
||||
const fsMock = jest.spyOn(fs, 'outputFile').mockImplementation(() => {});
|
||||
describe.each(['atom', 'rss', 'json'])('%s', (feedType) => {
|
||||
const fsMock = jest.spyOn(fs, 'outputFile').mockImplementation(() => {});
|
||||
|
||||
test('should not show feed without posts', async () => {
|
||||
const siteDir = __dirname;
|
||||
const siteConfig = {
|
||||
title: 'Hello',
|
||||
baseUrl: '/',
|
||||
url: 'https://docusaurus.io',
|
||||
favicon: 'image/favicon.ico',
|
||||
};
|
||||
const outDir = path.join(siteDir, 'build-snap');
|
||||
it('does not get generated without posts', async () => {
|
||||
const siteDir = __dirname;
|
||||
const siteConfig = {
|
||||
title: 'Hello',
|
||||
baseUrl: '/',
|
||||
url: 'https://docusaurus.io',
|
||||
favicon: 'image/favicon.ico',
|
||||
};
|
||||
const outDir = path.join(siteDir, 'build-snap');
|
||||
|
||||
await testGenerateFeeds(
|
||||
{
|
||||
siteDir,
|
||||
siteConfig,
|
||||
i18n: DefaultI18N,
|
||||
outDir,
|
||||
} as LoadContext,
|
||||
{
|
||||
path: 'invalid-blog-path',
|
||||
routeBasePath: 'blog',
|
||||
tagsBasePath: 'tags',
|
||||
authorsMapPath: 'authors.yml',
|
||||
include: ['*.md', '*.mdx'],
|
||||
feedOptions: {
|
||||
type: [feedType],
|
||||
copyright: 'Copyright',
|
||||
},
|
||||
readingTime: ({content, defaultReadingTime}) =>
|
||||
defaultReadingTime({content}),
|
||||
} as PluginOptions,
|
||||
);
|
||||
await testGenerateFeeds(
|
||||
{
|
||||
siteDir,
|
||||
siteConfig,
|
||||
i18n: DefaultI18N,
|
||||
outDir,
|
||||
} as LoadContext,
|
||||
{
|
||||
path: 'invalid-blog-path',
|
||||
routeBasePath: 'blog',
|
||||
tagsBasePath: 'tags',
|
||||
authorsMapPath: 'authors.yml',
|
||||
include: ['*.md', '*.mdx'],
|
||||
feedOptions: {
|
||||
type: [feedType],
|
||||
copyright: 'Copyright',
|
||||
},
|
||||
readingTime: ({content, defaultReadingTime}) =>
|
||||
defaultReadingTime({content}),
|
||||
} as PluginOptions,
|
||||
);
|
||||
|
||||
expect(fsMock).toBeCalledTimes(0);
|
||||
fsMock.mockClear();
|
||||
});
|
||||
expect(fsMock).toBeCalledTimes(0);
|
||||
fsMock.mockClear();
|
||||
});
|
||||
|
||||
test('shows feed item for each post', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const outDir = path.join(siteDir, 'build-snap');
|
||||
const siteConfig = {
|
||||
title: 'Hello',
|
||||
baseUrl: '/myBaseUrl/',
|
||||
url: 'https://docusaurus.io',
|
||||
favicon: 'image/favicon.ico',
|
||||
};
|
||||
it('has feed item for each post', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const outDir = path.join(siteDir, 'build-snap');
|
||||
const siteConfig = {
|
||||
title: 'Hello',
|
||||
baseUrl: '/myBaseUrl/',
|
||||
url: 'https://docusaurus.io',
|
||||
favicon: 'image/favicon.ico',
|
||||
};
|
||||
|
||||
// Build is quite difficult to mock, so we built the blog beforehand and
|
||||
// copied the output to the fixture...
|
||||
await testGenerateFeeds(
|
||||
{
|
||||
siteDir,
|
||||
siteConfig,
|
||||
i18n: DefaultI18N,
|
||||
outDir,
|
||||
} as LoadContext,
|
||||
{
|
||||
path: 'blog',
|
||||
routeBasePath: 'blog',
|
||||
tagsBasePath: 'tags',
|
||||
authorsMapPath: 'authors.yml',
|
||||
include: DEFAULT_OPTIONS.include,
|
||||
exclude: DEFAULT_OPTIONS.exclude,
|
||||
feedOptions: {
|
||||
type: [feedType],
|
||||
copyright: 'Copyright',
|
||||
},
|
||||
readingTime: ({content, defaultReadingTime}) =>
|
||||
defaultReadingTime({content}),
|
||||
} as PluginOptions,
|
||||
);
|
||||
// Build is quite difficult to mock, so we built the blog beforehand and
|
||||
// copied the output to the fixture...
|
||||
await testGenerateFeeds(
|
||||
{
|
||||
siteDir,
|
||||
siteConfig,
|
||||
i18n: DefaultI18N,
|
||||
outDir,
|
||||
} as LoadContext,
|
||||
{
|
||||
path: 'blog',
|
||||
routeBasePath: 'blog',
|
||||
tagsBasePath: 'tags',
|
||||
authorsMapPath: 'authors.yml',
|
||||
include: DEFAULT_OPTIONS.include,
|
||||
exclude: DEFAULT_OPTIONS.exclude,
|
||||
feedOptions: {
|
||||
type: [feedType],
|
||||
copyright: 'Copyright',
|
||||
},
|
||||
readingTime: ({content, defaultReadingTime}) =>
|
||||
defaultReadingTime({content}),
|
||||
} as PluginOptions,
|
||||
);
|
||||
|
||||
expect(fsMock.mock.calls.map((call) => call[1])).toMatchSnapshot();
|
||||
fsMock.mockClear();
|
||||
});
|
||||
});
|
||||
expect(fsMock.mock.calls.map((call) => call[1])).toMatchSnapshot();
|
||||
fsMock.mockClear();
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -59,58 +59,58 @@ function validateAndNormalize(
|
|||
}
|
||||
}
|
||||
|
||||
describe('loadBlog', () => {
|
||||
const PluginPath = 'blog';
|
||||
const PluginPath = 'blog';
|
||||
|
||||
const BaseEditUrl = 'https://baseEditUrl.com/edit';
|
||||
const BaseEditUrl = 'https://baseEditUrl.com/edit';
|
||||
|
||||
const getPlugin = async (
|
||||
siteDir: string,
|
||||
pluginOptions: Partial<PluginOptions> = {},
|
||||
i18n: I18n = DefaultI18N,
|
||||
) => {
|
||||
const generatedFilesDir: string = path.resolve(siteDir, '.docusaurus');
|
||||
const siteConfig = {
|
||||
title: 'Hello',
|
||||
baseUrl: '/',
|
||||
url: 'https://docusaurus.io',
|
||||
} as DocusaurusConfig;
|
||||
return pluginContentBlog(
|
||||
{
|
||||
siteDir,
|
||||
siteConfig,
|
||||
generatedFilesDir,
|
||||
i18n,
|
||||
} as LoadContext,
|
||||
validateAndNormalize(PluginOptionSchema, {
|
||||
path: PluginPath,
|
||||
editUrl: BaseEditUrl,
|
||||
...pluginOptions,
|
||||
}),
|
||||
);
|
||||
};
|
||||
const getPlugin = async (
|
||||
siteDir: string,
|
||||
pluginOptions: Partial<PluginOptions> = {},
|
||||
i18n: I18n = DefaultI18N,
|
||||
) => {
|
||||
const generatedFilesDir: string = path.resolve(siteDir, '.docusaurus');
|
||||
const siteConfig = {
|
||||
title: 'Hello',
|
||||
baseUrl: '/',
|
||||
url: 'https://docusaurus.io',
|
||||
} as DocusaurusConfig;
|
||||
return pluginContentBlog(
|
||||
{
|
||||
siteDir,
|
||||
siteConfig,
|
||||
generatedFilesDir,
|
||||
i18n,
|
||||
} as LoadContext,
|
||||
validateAndNormalize(PluginOptionSchema, {
|
||||
path: PluginPath,
|
||||
editUrl: BaseEditUrl,
|
||||
...pluginOptions,
|
||||
}),
|
||||
);
|
||||
};
|
||||
|
||||
const getBlogPosts = async (
|
||||
siteDir: string,
|
||||
pluginOptions: Partial<PluginOptions> = {},
|
||||
i18n: I18n = DefaultI18N,
|
||||
) => {
|
||||
const plugin = await getPlugin(siteDir, pluginOptions, i18n);
|
||||
const {blogPosts} = (await plugin.loadContent!())!;
|
||||
return blogPosts;
|
||||
};
|
||||
const getBlogPosts = async (
|
||||
siteDir: string,
|
||||
pluginOptions: Partial<PluginOptions> = {},
|
||||
i18n: I18n = DefaultI18N,
|
||||
) => {
|
||||
const plugin = await getPlugin(siteDir, pluginOptions, i18n);
|
||||
const {blogPosts} = (await plugin.loadContent!())!;
|
||||
return blogPosts;
|
||||
};
|
||||
|
||||
const getBlogTags = async (
|
||||
siteDir: string,
|
||||
pluginOptions: Partial<PluginOptions> = {},
|
||||
i18n: I18n = DefaultI18N,
|
||||
) => {
|
||||
const plugin = await getPlugin(siteDir, pluginOptions, i18n);
|
||||
const {blogTags} = (await plugin.loadContent!())!;
|
||||
return blogTags;
|
||||
};
|
||||
const getBlogTags = async (
|
||||
siteDir: string,
|
||||
pluginOptions: Partial<PluginOptions> = {},
|
||||
i18n: I18n = DefaultI18N,
|
||||
) => {
|
||||
const plugin = await getPlugin(siteDir, pluginOptions, i18n);
|
||||
const {blogTags} = (await plugin.loadContent!())!;
|
||||
return blogTags;
|
||||
};
|
||||
|
||||
test('getPathsToWatch', async () => {
|
||||
describe('blog plugin', () => {
|
||||
it('getPathsToWatch returns right files', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const plugin = await getPlugin(siteDir);
|
||||
const pathsToWatch = plugin.getPathsToWatch!();
|
||||
|
|
@ -124,7 +124,7 @@ describe('loadBlog', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('simple website', async () => {
|
||||
it('builds a simple website', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const blogPosts = await getBlogPosts(siteDir);
|
||||
|
||||
|
|
@ -303,7 +303,7 @@ describe('loadBlog', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('simple website blog dates localized', async () => {
|
||||
it('builds simple website blog with localized dates', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const blogPostsFrench = await getBlogPosts(siteDir, {}, getI18n('fr'));
|
||||
expect(blogPostsFrench).toHaveLength(8);
|
||||
|
|
@ -333,7 +333,7 @@ describe('loadBlog', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('edit url with editLocalizedBlogs true', async () => {
|
||||
it('handles edit URL with editLocalizedBlogs: true', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const blogPosts = await getBlogPosts(siteDir, {editLocalizedFiles: true});
|
||||
|
||||
|
|
@ -346,7 +346,7 @@ describe('loadBlog', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('edit url with editUrl function', async () => {
|
||||
it('handles edit URL with editUrl function', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
|
||||
const hardcodedEditUrl = 'hardcoded-edit-url';
|
||||
|
|
@ -410,7 +410,7 @@ describe('loadBlog', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('draft blog post not exists in production build', async () => {
|
||||
it('excludes draft blog post from production build', async () => {
|
||||
process.env.NODE_ENV = 'production';
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const blogPosts = await getBlogPosts(siteDir);
|
||||
|
|
@ -418,7 +418,7 @@ describe('loadBlog', () => {
|
|||
expect(blogPosts.find((v) => v.metadata.title === 'draft')).toBeUndefined();
|
||||
});
|
||||
|
||||
test('create blog post without date', async () => {
|
||||
it('creates blog post without date', async () => {
|
||||
const siteDir = path.join(
|
||||
__dirname,
|
||||
'__fixtures__',
|
||||
|
|
@ -457,7 +457,7 @@ describe('loadBlog', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('test ascending sort direction of blog post', async () => {
|
||||
it('can sort blog posts in ascending order', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const normalOrder = await getBlogPosts(siteDir);
|
||||
const reversedOrder = await getBlogPosts(siteDir, {
|
||||
|
|
@ -468,7 +468,7 @@ describe('loadBlog', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('test blog tags', async () => {
|
||||
it('works with blog tags', async () => {
|
||||
const siteDir = path.join(
|
||||
__dirname,
|
||||
'__fixtures__',
|
||||
|
|
@ -482,7 +482,7 @@ describe('loadBlog', () => {
|
|||
expect(blogTags).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('test blog tags: no pagination', async () => {
|
||||
it('works on blog tags without pagination', async () => {
|
||||
const siteDir = path.join(
|
||||
__dirname,
|
||||
'__fixtures__',
|
||||
|
|
|
|||
|
|
@ -1,101 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import {jest} from '@jest/globals';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'path';
|
||||
import {linkify, type LinkifyParams, getSourceToPermalink} from '../blogUtils';
|
||||
import type {
|
||||
BlogBrokenMarkdownLink,
|
||||
BlogContentPaths,
|
||||
BlogPost,
|
||||
} from '../types';
|
||||
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const contentPaths: BlogContentPaths = {
|
||||
contentPath: path.join(siteDir, 'blog-with-ref'),
|
||||
contentPathLocalized: path.join(siteDir, 'blog-with-ref-localized'),
|
||||
};
|
||||
const pluginDir = 'blog-with-ref';
|
||||
const blogPosts: BlogPost[] = [
|
||||
{
|
||||
id: 'Happy 1st Birthday Slash!',
|
||||
metadata: {
|
||||
permalink: '/blog/2018/12/14/Happy-First-Birthday-Slash',
|
||||
source: path.posix.join(
|
||||
'@site',
|
||||
pluginDir,
|
||||
'2018-12-14-Happy-First-Birthday-Slash.md',
|
||||
),
|
||||
title: 'Happy 1st Birthday Slash!',
|
||||
description: `pattern name`,
|
||||
date: new Date('2018-12-14'),
|
||||
tags: [],
|
||||
prevItem: {
|
||||
permalink: '/blog/2019/01/01/date-matter',
|
||||
title: 'date-matter',
|
||||
},
|
||||
truncated: false,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const transform = async (
|
||||
filePath: string,
|
||||
options?: Partial<LinkifyParams>,
|
||||
) => {
|
||||
const fileContent = await fs.readFile(filePath, 'utf-8');
|
||||
const transformedContent = linkify({
|
||||
filePath,
|
||||
fileString: fileContent,
|
||||
siteDir,
|
||||
contentPaths,
|
||||
sourceToPermalink: getSourceToPermalink(blogPosts),
|
||||
onBrokenMarkdownLink: (brokenMarkdownLink) => {
|
||||
throw new Error(
|
||||
`Broken markdown link found: ${JSON.stringify(brokenMarkdownLink)}`,
|
||||
);
|
||||
},
|
||||
...options,
|
||||
});
|
||||
return [fileContent, transformedContent];
|
||||
};
|
||||
|
||||
test('transform to correct link', async () => {
|
||||
const post = path.join(contentPaths.contentPath, 'post.md');
|
||||
const [content, transformedContent] = await transform(post);
|
||||
expect(transformedContent).toMatchSnapshot();
|
||||
expect(transformedContent).toContain(
|
||||
'](/blog/2018/12/14/Happy-First-Birthday-Slash',
|
||||
);
|
||||
expect(transformedContent).not.toContain(
|
||||
'](2018-12-14-Happy-First-Birthday-Slash.md)',
|
||||
);
|
||||
expect(content).not.toEqual(transformedContent);
|
||||
});
|
||||
|
||||
test('report broken markdown links', async () => {
|
||||
const filePath = 'post-with-broken-links.md';
|
||||
const folderPath = contentPaths.contentPath;
|
||||
const postWithBrokenLinks = path.join(folderPath, filePath);
|
||||
const onBrokenMarkdownLink = jest.fn();
|
||||
const [, transformedContent] = await transform(postWithBrokenLinks, {
|
||||
onBrokenMarkdownLink,
|
||||
});
|
||||
expect(transformedContent).toMatchSnapshot();
|
||||
expect(onBrokenMarkdownLink).toHaveBeenCalledTimes(2);
|
||||
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(1, {
|
||||
filePath: path.resolve(folderPath, filePath),
|
||||
contentPaths,
|
||||
link: 'postNotExist1.md',
|
||||
} as BlogBrokenMarkdownLink);
|
||||
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(2, {
|
||||
filePath: path.resolve(folderPath, filePath),
|
||||
contentPaths,
|
||||
link: './postNotExist2.mdx',
|
||||
} as BlogBrokenMarkdownLink);
|
||||
});
|
||||
|
|
@ -11,121 +11,123 @@ import {PluginOptionSchema, DEFAULT_OPTIONS} from '../pluginOptionSchema';
|
|||
const markdownPluginsFunctionStub = () => {};
|
||||
const markdownPluginsObjectStub = {};
|
||||
|
||||
test('should normalize options', () => {
|
||||
const {value, error} = PluginOptionSchema.validate({});
|
||||
expect(value).toEqual(DEFAULT_OPTIONS);
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should accept correctly defined user options', () => {
|
||||
const userOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: 'rss', title: 'myTitle'},
|
||||
path: 'not_blog',
|
||||
routeBasePath: 'myBlog',
|
||||
postsPerPage: 5,
|
||||
include: ['api/*', 'docs/*'],
|
||||
};
|
||||
const {value, error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual({
|
||||
...userOptions,
|
||||
feedOptions: {type: ['rss'], title: 'myTitle', copyright: ''},
|
||||
});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should accept valid user options', async () => {
|
||||
const userOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
routeBasePath: 'myBlog',
|
||||
beforeDefaultRemarkPlugins: [],
|
||||
beforeDefaultRehypePlugins: [markdownPluginsFunctionStub],
|
||||
remarkPlugins: [[markdownPluginsFunctionStub, {option1: '42'}]],
|
||||
rehypePlugins: [
|
||||
markdownPluginsObjectStub,
|
||||
[markdownPluginsFunctionStub, {option1: '42'}],
|
||||
],
|
||||
};
|
||||
const {value, error} = await PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual(userOptions);
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should throw Error in case of invalid options', () => {
|
||||
const {error} = PluginOptionSchema.validate({
|
||||
path: 'not_blog',
|
||||
postsPerPage: -1,
|
||||
include: ['api/*', 'docs/*'],
|
||||
routeBasePath: 'not_blog',
|
||||
describe('blog plugin options schema', () => {
|
||||
it('normalizes options', () => {
|
||||
const {value, error} = PluginOptionSchema.validate({});
|
||||
expect(value).toEqual(DEFAULT_OPTIONS);
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
expect(error).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should throw Error in case of invalid feedtype', () => {
|
||||
const {error} = PluginOptionSchema.validate({
|
||||
feedOptions: {
|
||||
type: 'none',
|
||||
},
|
||||
it('accepts correctly defined user options', () => {
|
||||
const userOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: 'rss', title: 'myTitle'},
|
||||
path: 'not_blog',
|
||||
routeBasePath: 'myBlog',
|
||||
postsPerPage: 5,
|
||||
include: ['api/*', 'docs/*'],
|
||||
};
|
||||
const {value, error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual({
|
||||
...userOptions,
|
||||
feedOptions: {type: ['rss'], title: 'myTitle', copyright: ''},
|
||||
});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
expect(error).toMatchSnapshot();
|
||||
});
|
||||
it('accepts valid user options', async () => {
|
||||
const userOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
routeBasePath: 'myBlog',
|
||||
beforeDefaultRemarkPlugins: [],
|
||||
beforeDefaultRehypePlugins: [markdownPluginsFunctionStub],
|
||||
remarkPlugins: [[markdownPluginsFunctionStub, {option1: '42'}]],
|
||||
rehypePlugins: [
|
||||
markdownPluginsObjectStub,
|
||||
[markdownPluginsFunctionStub, {option1: '42'}],
|
||||
],
|
||||
};
|
||||
const {value, error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual(userOptions);
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should convert all feed type to array with other feed type', () => {
|
||||
const {value} = PluginOptionSchema.validate({
|
||||
feedOptions: {type: 'all'},
|
||||
});
|
||||
expect(value).toEqual({
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: ['rss', 'atom', 'json'], copyright: ''},
|
||||
});
|
||||
});
|
||||
it('throws Error in case of invalid options', () => {
|
||||
const {error} = PluginOptionSchema.validate({
|
||||
path: 'not_blog',
|
||||
postsPerPage: -1,
|
||||
include: ['api/*', 'docs/*'],
|
||||
routeBasePath: 'not_blog',
|
||||
});
|
||||
|
||||
test('should accept null type and return same', () => {
|
||||
const {value, error} = PluginOptionSchema.validate({
|
||||
feedOptions: {type: null},
|
||||
expect(error).toMatchSnapshot();
|
||||
});
|
||||
expect(value).toEqual({
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: null},
|
||||
});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should contain array with rss + atom for missing feed type', () => {
|
||||
const {value} = PluginOptionSchema.validate({
|
||||
feedOptions: {},
|
||||
});
|
||||
expect(value).toEqual(DEFAULT_OPTIONS);
|
||||
});
|
||||
it('throws Error in case of invalid feedtype', () => {
|
||||
const {error} = PluginOptionSchema.validate({
|
||||
feedOptions: {
|
||||
type: 'none',
|
||||
},
|
||||
});
|
||||
|
||||
test('should have array with rss + atom, title for missing feed type', () => {
|
||||
const {value} = PluginOptionSchema.validate({
|
||||
feedOptions: {title: 'title'},
|
||||
expect(error).toMatchSnapshot();
|
||||
});
|
||||
expect(value).toEqual({
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: ['rss', 'atom'], title: 'title', copyright: ''},
|
||||
|
||||
it('converts all feed type to array with other feed type', () => {
|
||||
const {value} = PluginOptionSchema.validate({
|
||||
feedOptions: {type: 'all'},
|
||||
});
|
||||
expect(value).toEqual({
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: ['rss', 'atom', 'json'], copyright: ''},
|
||||
});
|
||||
});
|
||||
|
||||
it('accepts null type and return same', () => {
|
||||
const {value, error} = PluginOptionSchema.validate({
|
||||
feedOptions: {type: null},
|
||||
});
|
||||
expect(value).toEqual({
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: null},
|
||||
});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
it('contains array with rss + atom for missing feed type', () => {
|
||||
const {value} = PluginOptionSchema.validate({
|
||||
feedOptions: {},
|
||||
});
|
||||
expect(value).toEqual(DEFAULT_OPTIONS);
|
||||
});
|
||||
|
||||
it('has array with rss + atom, title for missing feed type', () => {
|
||||
const {value} = PluginOptionSchema.validate({
|
||||
feedOptions: {title: 'title'},
|
||||
});
|
||||
expect(value).toEqual({
|
||||
...DEFAULT_OPTIONS,
|
||||
feedOptions: {type: ['rss', 'atom'], title: 'title', copyright: ''},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('blog sidebar', () => {
|
||||
test('should accept 0 sidebar count', () => {
|
||||
it('accepts 0 sidebar count', () => {
|
||||
const userOptions = {blogSidebarCount: 0};
|
||||
const {value, error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual({...DEFAULT_OPTIONS, ...userOptions});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should accept "ALL" sidebar count', () => {
|
||||
it('accepts "ALL" sidebar count', () => {
|
||||
const userOptions = {blogSidebarCount: 'ALL'};
|
||||
const {value, error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual({...DEFAULT_OPTIONS, ...userOptions});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should reject "abcdef" sidebar count', () => {
|
||||
it('rejects "abcdef" sidebar count', () => {
|
||||
const userOptions = {blogSidebarCount: 'abcdef'};
|
||||
const {error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(error).toMatchInlineSnapshot(
|
||||
|
|
@ -133,14 +135,14 @@ describe('blog sidebar', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should accept "all posts" sidebar title', () => {
|
||||
it('accepts "all posts" sidebar title', () => {
|
||||
const userOptions = {blogSidebarTitle: 'all posts'};
|
||||
const {value, error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual({...DEFAULT_OPTIONS, ...userOptions});
|
||||
expect(error).toBe(undefined);
|
||||
});
|
||||
|
||||
test('should reject 42 sidebar title', () => {
|
||||
it('rejects 42 sidebar title', () => {
|
||||
const userOptions = {blogSidebarTitle: 42};
|
||||
const {error} = PluginOptionSchema.validate(userOptions);
|
||||
expect(error).toMatchInlineSnapshot(
|
||||
|
|
|
|||
|
|
@ -71,26 +71,26 @@ function getSampleTranslationFilesTranslated() {
|
|||
}
|
||||
|
||||
describe('getContentTranslationFiles', () => {
|
||||
test('should return translation files matching snapshot', async () => {
|
||||
it('returns translation files matching snapshot', async () => {
|
||||
expect(getSampleTranslationFiles()).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
||||
describe('translateContent', () => {
|
||||
test('should fallback when translation is incomplete', () => {
|
||||
it('falls back when translation is incomplete', () => {
|
||||
expect(
|
||||
translateContent(sampleBlogContent, [{path: 'foo', content: {}}]),
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('should not translate anything if translation files are untranslated', () => {
|
||||
it('does not translate anything if translation files are untranslated', () => {
|
||||
const translationFiles = getSampleTranslationFiles();
|
||||
expect(translateContent(sampleBlogContent, translationFiles)).toEqual(
|
||||
sampleBlogContent,
|
||||
);
|
||||
});
|
||||
|
||||
test('should return translated loaded content matching snapshot', () => {
|
||||
it('returns translated loaded', () => {
|
||||
const translationFiles = getSampleTranslationFilesTranslated();
|
||||
expect(
|
||||
translateContent(sampleBlogContent, translationFiles),
@@ -1,26 +1,19 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`sidebar site with wrong sidebar content 1`] = `
"Invalid sidebar file at \\"packages/docusaurus-plugin-content-docs/src/__tests__/__fixtures__/simple-site/wrong-sidebars.json\\".
These sidebar document ids do not exist:
- goku

Available document ids are:
- doc with space
- foo/bar
- foo/baz
- headingAsTitle
- hello
- ipsum
- lorem
- rootAbsoluteSlug
- rootRelativeSlug
- rootResolvedSlug
- rootTryToEscapeSlug
- slugs/absoluteSlug
- slugs/relativeSlug
- slugs/resolvedSlug
- slugs/tryToEscapeSlug"
exports[`sidebar site with undefined sidebar 1`] = `
Object {
"defaultSidebar": Array [
Object {
"id": "hello-1",
"type": "doc",
},
Object {
"id": "hello-2",
"label": "Hello 2 From Doc",
"type": "doc",
},
],
}
`;

exports[`simple website content 1`] = `
@@ -1247,6 +1240,15 @@ Array [
]
`;

exports[`simple website getPathToWatch 1`] = `
Array [
"sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/current/**/*.{md,mdx}",
"docs/**/*.{md,mdx}",
"docs/**/_category_.{json,yml,yaml}",
]
`;

exports[`site with custom sidebar items generator sidebar is autogenerated according to a custom sidebarItemsGenerator 1`] = `
Object {
"defaultSidebar": Array [
@@ -2511,6 +2513,19 @@ Array [
]
`;

exports[`versioned website (community) getPathToWatch 1`] = `
Array [
"community_sidebars.json",
"i18n/en/docusaurus-plugin-content-docs-community/current/**/*.{md,mdx}",
"community/**/*.{md,mdx}",
"community/**/_category_.{json,yml,yaml}",
"community_versioned_sidebars/version-1.0.0-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs-community/version-1.0.0/**/*.{md,mdx}",
"community_versioned_docs/version-1.0.0/**/*.{md,mdx}",
"community_versioned_docs/version-1.0.0/**/_category_.{json,yml,yaml}",
]
`;

exports[`versioned website content 1`] = `
Object {
"description": "This is next version of bar.",
@@ -3885,3 +3900,24 @@ Object {
],
}
`;

exports[`versioned website getPathToWatch 1`] = `
Array [
"sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/current/**/*.{md,mdx}",
"docs/**/*.{md,mdx}",
"docs/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-1.0.1-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/version-1.0.1/**/*.{md,mdx}",
"versioned_docs/version-1.0.1/**/*.{md,mdx}",
"versioned_docs/version-1.0.1/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-1.0.0-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/version-1.0.0/**/*.{md,mdx}",
"versioned_docs/version-1.0.0/**/*.{md,mdx}",
"versioned_docs/version-1.0.0/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-withSlugs-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/version-withSlugs/**/*.{md,mdx}",
"versioned_docs/version-withSlugs/**/*.{md,mdx}",
"versioned_docs/version-withSlugs/**/_category_.{json,yml,yaml}",
]
`;

@@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`getLoadedContentTranslationFiles should return translation files matching snapshot 1`] = `
exports[`getLoadedContentTranslationFiles returns translation files 1`] = `
Array [
Object {
"content": Object {
@@ -80,7 +80,7 @@ Array [
]
`;

exports[`translateLoadedContent should return translated loaded content matching snapshot 1`] = `
exports[`translateLoadedContent returns translated loaded content 1`] = `
Object {
"loadedVersions": Array [
Object {

@ -33,7 +33,7 @@ describe('docsVersion', () => {
|
|||
sidebarCollapsible: true,
|
||||
};
|
||||
|
||||
test('no version tag provided', async () => {
|
||||
it('no version tag provided', async () => {
|
||||
await expect(() =>
|
||||
cliDocsVersionCommand(
|
||||
null,
|
||||
|
|
@ -66,7 +66,7 @@ describe('docsVersion', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('version tag should not have slash', async () => {
|
||||
it('version tag should not have slash', async () => {
|
||||
await expect(() =>
|
||||
cliDocsVersionCommand(
|
||||
'foo/bar',
|
||||
|
|
@ -89,7 +89,7 @@ describe('docsVersion', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('version tag should not be too long', async () => {
|
||||
it('version tag should not be too long', async () => {
|
||||
await expect(() =>
|
||||
cliDocsVersionCommand(
|
||||
'a'.repeat(255),
|
||||
|
|
@ -102,7 +102,7 @@ describe('docsVersion', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('version tag should not be a dot or two dots', async () => {
|
||||
it('version tag should not be a dot or two dots', async () => {
|
||||
await expect(() =>
|
||||
cliDocsVersionCommand(
|
||||
'..',
|
||||
|
|
@ -125,7 +125,7 @@ describe('docsVersion', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('version tag should be a valid pathname', async () => {
|
||||
it('version tag should be a valid pathname', async () => {
|
||||
await expect(() =>
|
||||
cliDocsVersionCommand(
|
||||
'<foo|bar>',
|
||||
|
|
@ -158,7 +158,7 @@ describe('docsVersion', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('version tag already exist', async () => {
|
||||
it('version tag already exist', async () => {
|
||||
await expect(() =>
|
||||
cliDocsVersionCommand(
|
||||
'1.0.0',
|
||||
|
|
@ -171,7 +171,7 @@ describe('docsVersion', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('no docs file to version', async () => {
|
||||
it('no docs file to version', async () => {
|
||||
const emptySiteDir = path.join(fixtureDir, 'empty-site');
|
||||
await expect(() =>
|
||||
cliDocsVersionCommand(
|
||||
|
|
@ -185,8 +185,8 @@ describe('docsVersion', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('first time versioning', async () => {
|
||||
const copyMock = jest.spyOn(fs, 'copy').mockImplementation();
|
||||
it('first time versioning', async () => {
|
||||
const copyMock = jest.spyOn(fs, 'copy').mockImplementation(() => {});
|
||||
const writeMock = jest.spyOn(fs, 'outputFile');
|
||||
let versionedSidebar;
|
||||
let versionedSidebarPath;
|
||||
|
|
@ -200,7 +200,7 @@ describe('docsVersion', () => {
|
|||
versionsPath = filepath;
|
||||
versions = JSON.parse(content as string);
|
||||
});
|
||||
const consoleMock = jest.spyOn(console, 'log').mockImplementation();
|
||||
const consoleMock = jest.spyOn(console, 'log').mockImplementation(() => {});
|
||||
const options = {
|
||||
...DEFAULT_OPTIONS,
|
||||
sidebarPath: path.join(simpleSiteDir, 'sidebars.json'),
|
||||
|
|
@ -240,8 +240,8 @@ describe('docsVersion', () => {
|
|||
consoleMock.mockRestore();
|
||||
});
|
||||
|
||||
test('not the first time versioning', async () => {
|
||||
const copyMock = jest.spyOn(fs, 'copy').mockImplementation();
|
||||
it('not the first time versioning', async () => {
|
||||
const copyMock = jest.spyOn(fs, 'copy').mockImplementation(() => {});
|
||||
const writeMock = jest.spyOn(fs, 'outputFile');
|
||||
let versionedSidebar;
|
||||
let versionedSidebarPath;
|
||||
|
|
@ -255,7 +255,7 @@ describe('docsVersion', () => {
|
|||
versionsPath = filepath;
|
||||
versions = JSON.parse(content as string);
|
||||
});
|
||||
const consoleMock = jest.spyOn(console, 'log').mockImplementation();
|
||||
const consoleMock = jest.spyOn(console, 'log').mockImplementation(() => {});
|
||||
const options = {
|
||||
...DEFAULT_OPTIONS,
|
||||
sidebarPath: path.join(versionedSiteDir, 'sidebars.json'),
|
||||
|
|
@ -295,10 +295,10 @@ describe('docsVersion', () => {
|
|||
consoleMock.mockRestore();
|
||||
});
|
||||
|
||||
test('second docs instance versioning', async () => {
|
||||
it('second docs instance versioning', async () => {
|
||||
const pluginId = 'community';
|
||||
|
||||
const copyMock = jest.spyOn(fs, 'copy').mockImplementation();
|
||||
const copyMock = jest.spyOn(fs, 'copy').mockImplementation(() => {});
|
||||
const writeMock = jest.spyOn(fs, 'outputFile');
|
||||
let versionedSidebar;
|
||||
let versionedSidebarPath;
|
||||
|
|
@ -312,7 +312,7 @@ describe('docsVersion', () => {
|
|||
versionsPath = filepath;
|
||||
versions = JSON.parse(content as string);
|
||||
});
|
||||
const consoleMock = jest.spyOn(console, 'log').mockImplementation();
|
||||
const consoleMock = jest.spyOn(console, 'log').mockImplementation(() => {});
|
||||
const options = {
|
||||
...DEFAULT_OPTIONS,
|
||||
path: 'community',
|
||||
|
|
|
|||
|
|
@ -21,12 +21,14 @@ function testField(params: {
|
|||
ErrorMessage: string,
|
||||
][];
|
||||
}) {
|
||||
// eslint-disable-next-line jest/require-top-level-describe
|
||||
test(`[${params.prefix}] accept valid values`, () => {
|
||||
params.validFrontMatters.forEach((frontMatter) => {
|
||||
expect(validateDocFrontMatter(frontMatter)).toEqual(frontMatter);
|
||||
});
|
||||
});
|
||||
|
||||
// eslint-disable-next-line jest/require-top-level-describe
|
||||
test(`[${params.prefix}] convert valid values`, () => {
|
||||
params.convertibleFrontMatter?.forEach(
|
||||
([convertibleFrontMatter, convertedFrontMatter]) => {
|
||||
|
|
@ -37,6 +39,7 @@ function testField(params: {
|
|||
);
|
||||
});
|
||||
|
||||
// eslint-disable-next-line jest/require-top-level-describe
|
||||
test(`[${params.prefix}] throw error for values`, () => {
|
||||
params.invalidFrontMatters?.forEach(([frontMatter, message]) => {
|
||||
try {
|
||||
|
|
@ -59,13 +62,13 @@ function testField(params: {
|
|||
});
|
||||
}
|
||||
|
||||
describe('validateDocFrontMatter', () => {
|
||||
test('accept empty object', () => {
|
||||
describe('doc front matter schema', () => {
|
||||
it('accepts empty object', () => {
|
||||
const frontMatter: DocFrontMatter = {};
|
||||
expect(validateDocFrontMatter(frontMatter)).toEqual(frontMatter);
|
||||
});
|
||||
|
||||
test('accept unknown field', () => {
|
||||
it('accepts unknown field', () => {
|
||||
const frontMatter = {abc: '1'};
|
||||
expect(validateDocFrontMatter(frontMatter)).toEqual(frontMatter);
|
||||
});
|
||||
|
|
@ -277,7 +280,7 @@ describe('validateDocFrontMatter tags', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('validateDocFrontMatter toc_min_heading_level', () => {
|
||||
describe('toc_min_heading_level', () => {
|
||||
testField({
|
||||
prefix: 'toc_min_heading_level',
|
||||
validFrontMatters: [
|
||||
|
|
@ -313,7 +316,7 @@ describe('validateDocFrontMatter toc_min_heading_level', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('validateDocFrontMatter toc_max_heading_level', () => {
|
||||
describe('toc_max_heading_level', () => {
|
||||
testField({
|
||||
prefix: 'toc_max_heading_level',
|
||||
validFrontMatters: [
|
||||
|
|
@ -349,7 +352,7 @@ describe('validateDocFrontMatter toc_max_heading_level', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('validateDocFrontMatter toc min/max consistency', () => {
|
||||
describe('toc min/max consistency', () => {
|
||||
testField({
|
||||
prefix: 'toc min/max',
|
||||
validFrontMatters: [
|
||||
|
|
|
|||
|
|
@@ -195,7 +195,7 @@ describe('simple site', () => {
};
}

test('readVersionDocs', async () => {
it('readVersionDocs', async () => {
const {options, currentVersion} = await loadSite();
const docs = await readVersionDocs(currentVersion, options);
expect(docs.map((doc) => doc.source).sort()).toEqual(
@@ -219,7 +219,7 @@ describe('simple site', () => {
);
});

test('normal docs', async () => {
it('normal docs', async () => {
const {defaultTestUtils} = await loadSite();
await defaultTestUtils.testMeta(path.join('foo', 'bar.md'), {
version: 'current',
@@ -268,7 +268,7 @@ describe('simple site', () => {
});
});

test('docs with editUrl', async () => {
it('docs with editUrl', async () => {
const {siteDir, context, options, currentVersion} = await loadSite({
options: {
editUrl: 'https://github.com/facebook/docusaurus/edit/main/website',
@@ -317,7 +317,7 @@ describe('simple site', () => {
});
});

test('docs with custom editUrl & unrelated frontMatter', async () => {
it('docs with custom editUrl & unrelated frontMatter', async () => {
const {defaultTestUtils} = await loadSite();

await defaultTestUtils.testMeta('lorem.md', {
@@ -338,7 +338,7 @@ describe('simple site', () => {
});
});

test('docs with function editUrl', async () => {
it('docs with function editUrl', async () => {
const hardcodedEditUrl = 'hardcoded-edit-url';

const editUrlFunction: EditUrlFunction = jest.fn(() => hardcodedEditUrl);
@@ -399,7 +399,7 @@ describe('simple site', () => {
});
});

test('docs with last update time and author', async () => {
it('docs with last update time and author', async () => {
const {siteDir, context, options, currentVersion} = await loadSite({
options: {
showLastUpdateAuthor: true,
@@ -435,7 +435,7 @@ describe('simple site', () => {
});
});

test('docs with slugs', async () => {
it('docs with slugs', async () => {
const {defaultTestUtils} = await loadSite();

await defaultTestUtils.testSlug(
@@ -473,7 +473,7 @@ describe('simple site', () => {
);
});

test('docs with invalid id', async () => {
it('docs with invalid id', async () => {
const {defaultTestUtils} = await loadSite();
await expect(async () =>
defaultTestUtils.processDocFile(
@@ -489,7 +489,7 @@ describe('simple site', () => {
);
});

test('custom pagination', async () => {
it('custom pagination', async () => {
const {defaultTestUtils, options, versionsMetadata} = await loadSite();
const docs = await readVersionDocs(versionsMetadata[0], options);
await expect(
@@ -497,7 +497,7 @@ describe('simple site', () => {
).resolves.toMatchSnapshot();
});

test('bad pagination', async () => {
it('bad pagination', async () => {
const {defaultTestUtils, options, versionsMetadata} = await loadSite();
const docs = await readVersionDocs(versionsMetadata[0], options);
docs.push(
@@ -577,7 +577,7 @@ describe('versioned site', () => {
};
}

test('next docs', async () => {
it('next docs', async () => {
const {currentVersionTestUtils} = await loadSite();

await currentVersionTestUtils.testMeta(path.join('foo', 'bar.md'), {
@@ -631,7 +631,7 @@ describe('versioned site', () => {
});
});

test('versioned docs', async () => {
it('versioned docs', async () => {
const {version101TestUtils, version100TestUtils} = await loadSite();

await version100TestUtils.testMeta(path.join('foo', 'bar.md'), {
@@ -690,7 +690,7 @@ describe('versioned site', () => {
});
});

test('next doc slugs', async () => {
it('next doc slugs', async () => {
const {currentVersionTestUtils} = await loadSite();

await currentVersionTestUtils.testSlug(
@@ -711,7 +711,7 @@ describe('versioned site', () => {
);
});

test('versioned doc slugs', async () => {
it('versioned doc slugs', async () => {
const {versionWithSlugsTestUtils} = await loadSite();

await versionWithSlugsTestUtils.testSlug(
@@ -749,7 +749,7 @@ describe('versioned site', () => {
);
});

test('doc with editUrl function', async () => {
it('doc with editUrl function', async () => {
const hardcodedEditUrl = 'hardcoded-edit-url';

const editUrlFunction: EditUrlFunction = jest.fn(() => hardcodedEditUrl);
@@ -795,7 +795,7 @@ describe('versioned site', () => {
});
});

test('translated doc with editUrl', async () => {
it('translated doc with editUrl', async () => {
const {siteDir, context, options, version100} = await loadSite({
options: {
editUrl: 'https://github.com/facebook/docusaurus/edit/main/website',
@@ -830,7 +830,7 @@ describe('versioned site', () => {
});
});

test('translated en doc with editUrl and editCurrentVersion=true', async () => {
it('translated en doc with editUrl and editCurrentVersion=true', async () => {
const {siteDir, context, options, version100} = await loadSite({
options: {
editUrl: 'https://github.com/facebook/docusaurus/edit/main/website',
@@ -865,7 +865,7 @@ describe('versioned site', () => {
});
});

test('translated fr doc with editUrl and editLocalizedFiles=true', async () => {
it('translated fr doc with editUrl and editLocalizedFiles=true', async () => {
const {siteDir, context, options, version100} = await loadSite({
options: {
editUrl: 'https://github.com/facebook/docusaurus/edit/main/website',
@@ -901,7 +901,7 @@ describe('versioned site', () => {
});
});

test('translated fr doc with editUrl and editLocalizedFiles=true + editCurrentVersion=true', async () => {
it('translated fr doc with editUrl and editLocalizedFiles=true + editCurrentVersion=true', async () => {
const {siteDir, context, options, version100} = await loadSite({
options: {
editUrl: 'https://github.com/facebook/docusaurus/edit/main/website',
@@ -940,7 +940,7 @@ describe('versioned site', () => {
});

describe('isConventionalDocIndex', () => {
test('supports readme', () => {
it('supports readme', () => {
expect(
isCategoryIndex({
fileName: 'readme',
@@ -971,7 +971,7 @@ describe('isConventionalDocIndex', () => {
).toEqual(true);
});

test('supports index', () => {
it('supports index', () => {
expect(
isCategoryIndex({
fileName: 'index',
@@ -1002,7 +1002,7 @@ describe('isConventionalDocIndex', () => {
).toEqual(true);
});

test('supports <categoryName>/<categoryName>.md', () => {
it('supports <categoryName>/<categoryName>.md', () => {
expect(
isCategoryIndex({
fileName: 'someCategory',
@@ -1040,7 +1040,7 @@ describe('isConventionalDocIndex', () => {
).toEqual(true);
});

test('reject other cases', () => {
it('reject other cases', () => {
expect(
isCategoryIndex({
fileName: 'some_Category',

@@ -113,7 +113,7 @@ Entries created:
};

describe('sidebar', () => {
test('site with wrong sidebar content', async () => {
it('site with wrong sidebar content', async () => {
const siteDir = path.join(__dirname, '__fixtures__', 'simple-site');
const context = await loadContext(siteDir);
const sidebarPath = path.join(siteDir, 'wrong-sidebars.json');
@@ -123,10 +123,32 @@ describe('sidebar', () => {
sidebarPath,
}),
);
await expect(plugin.loadContent!()).rejects.toThrowErrorMatchingSnapshot();
await expect(plugin.loadContent!()).rejects
.toThrowErrorMatchingInlineSnapshot(`
"Invalid sidebar file at \\"packages/docusaurus-plugin-content-docs/src/__tests__/__fixtures__/simple-site/wrong-sidebars.json\\".
These sidebar document ids do not exist:
- goku

Available document ids are:
- doc with space
- foo/bar
- foo/baz
- headingAsTitle
- hello
- ipsum
- lorem
- rootAbsoluteSlug
- rootRelativeSlug
- rootResolvedSlug
- rootTryToEscapeSlug
- slugs/absoluteSlug
- slugs/relativeSlug
- slugs/resolvedSlug
- slugs/tryToEscapeSlug"
`);
});

test('site with wrong sidebar file path', async () => {
it('site with wrong sidebar file path', async () => {
const siteDir = path.join(__dirname, '__fixtures__', 'site-with-doc-label');
const context = await loadContext(siteDir);
@@ -147,7 +169,7 @@ describe('sidebar', () => {
`);
});

test('site with undefined sidebar', async () => {
it('site with undefined sidebar', async () => {
const siteDir = path.join(__dirname, '__fixtures__', 'site-with-doc-label');
const context = await loadContext(siteDir);
const plugin = await pluginContentDocs(
@@ -159,24 +181,10 @@ describe('sidebar', () => {
const result = await plugin.loadContent!();

expect(result.loadedVersions).toHaveLength(1);
expect(result.loadedVersions[0].sidebars).toMatchInlineSnapshot(`
Object {
"defaultSidebar": Array [
Object {
"id": "hello-1",
"type": "doc",
},
Object {
"id": "hello-2",
"label": "Hello 2 From Doc",
"type": "doc",
},
],
}
`);
expect(result.loadedVersions[0].sidebars).toMatchSnapshot();
});

test('site with disabled sidebar', async () => {
it('site with disabled sidebar', async () => {
const siteDir = path.join(__dirname, '__fixtures__', 'site-with-doc-label');
const context = await loadContext(siteDir);
const plugin = await pluginContentDocs(
@@ -195,7 +203,7 @@ describe('sidebar', () => {
describe('empty/no docs website', () => {
const siteDir = path.join(__dirname, '__fixtures__', 'empty-site');

test('no files in docs folder', async () => {
it('no files in docs folder', async () => {
const context = await loadContext(siteDir);
await fs.ensureDir(path.join(siteDir, 'docs'));
const plugin = await pluginContentDocs(
@@ -209,7 +217,7 @@ describe('empty/no docs website', () => {
);
});

test('docs folder does not exist', async () => {
it('docs folder does not exist', async () => {
const context = await loadContext(siteDir);
await expect(
pluginContentDocs(
@@ -245,11 +253,11 @@ describe('simple website', () => {
return {siteDir, context, sidebarPath, plugin, pluginContentDir};
}

test('extendCli - docsVersion', async () => {
it('extendCli - docsVersion', async () => {
const {siteDir, sidebarPath, plugin} = await loadSite();
const mock = jest
.spyOn(cliDocs, 'cliDocsVersionCommand')
.mockImplementation();
.mockImplementation(async () => {});
const cli = new commander.Command();
// @ts-expect-error: in actual usage, we pass the static commander instead
// of the new command
@@ -265,22 +273,14 @@ describe('simple website', () => {
mock.mockRestore();
});

test('getPathToWatch', async () => {
it('getPathToWatch', async () => {
const {siteDir, plugin} = await loadSite();

const pathToWatch = plugin.getPathsToWatch!();
const matchPattern = pathToWatch.map((filepath) =>
posixPath(path.relative(siteDir, filepath)),
);
expect(matchPattern).not.toEqual([]);
expect(matchPattern).toMatchInlineSnapshot(`
Array [
"sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/current/**/*.{md,mdx}",
"docs/**/*.{md,mdx}",
"docs/**/_category_.{json,yml,yaml}",
]
`);
expect(matchPattern).toMatchSnapshot();
expect(isMatch('docs/hello.md', matchPattern)).toEqual(true);
expect(isMatch('docs/hello.mdx', matchPattern)).toEqual(true);
expect(isMatch('docs/foo/bar.md', matchPattern)).toEqual(true);
@@ -294,7 +294,7 @@ describe('simple website', () => {
expect(isMatch('super/docs/hello.md', matchPattern)).toEqual(false);
});

test('configureWebpack', async () => {
it('configureWebpack', async () => {
const {plugin} = await loadSite();

const content = await plugin.loadContent?.();
@@ -316,7 +316,7 @@ describe('simple website', () => {
expect(errors).toBeUndefined();
});

test('content', async () => {
it('content', async () => {
const {plugin, pluginContentDir} = await loadSite();
const content = await plugin.loadContent!();
expect(content.loadedVersions.length).toEqual(1);
@@ -370,11 +370,11 @@ describe('versioned website', () => {
};
}

test('extendCli - docsVersion', async () => {
it('extendCli - docsVersion', async () => {
const {siteDir, routeBasePath, sidebarPath, plugin} = await loadSite();
const mock = jest
.spyOn(cliDocs, 'cliDocsVersionCommand')
.mockImplementation();
.mockImplementation(async () => {});
const cli = new commander.Command();
// @ts-expect-error: in actual usage, we pass the static commander instead
// of the new command
@@ -390,33 +390,14 @@ describe('versioned website', () => {
mock.mockRestore();
});

test('getPathToWatch', async () => {
it('getPathToWatch', async () => {
const {siteDir, plugin} = await loadSite();
const pathToWatch = plugin.getPathsToWatch!();
const matchPattern = pathToWatch.map((filepath) =>
posixPath(path.relative(siteDir, filepath)),
);
expect(matchPattern).not.toEqual([]);
expect(matchPattern).toMatchInlineSnapshot(`
Array [
"sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/current/**/*.{md,mdx}",
"docs/**/*.{md,mdx}",
"docs/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-1.0.1-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/version-1.0.1/**/*.{md,mdx}",
"versioned_docs/version-1.0.1/**/*.{md,mdx}",
"versioned_docs/version-1.0.1/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-1.0.0-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/version-1.0.0/**/*.{md,mdx}",
"versioned_docs/version-1.0.0/**/*.{md,mdx}",
"versioned_docs/version-1.0.0/**/_category_.{json,yml,yaml}",
"versioned_sidebars/version-withSlugs-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs/version-withSlugs/**/*.{md,mdx}",
"versioned_docs/version-withSlugs/**/*.{md,mdx}",
"versioned_docs/version-withSlugs/**/_category_.{json,yml,yaml}",
]
`);
expect(matchPattern).toMatchSnapshot();
expect(isMatch('docs/hello.md', matchPattern)).toEqual(true);
expect(isMatch('docs/hello.mdx', matchPattern)).toEqual(true);
expect(isMatch('docs/foo/bar.md', matchPattern)).toEqual(true);
@@ -449,7 +430,7 @@ describe('versioned website', () => {
expect(isMatch('super/docs/hello.md', matchPattern)).toEqual(false);
});

test('content', async () => {
it('content', async () => {
const {plugin, pluginContentDir} = await loadSite();
const content = await plugin.loadContent!();
expect(content.loadedVersions.length).toEqual(4);
@@ -519,12 +500,12 @@ describe('versioned website (community)', () => {
};
}

test('extendCli - docsVersion', async () => {
it('extendCli - docsVersion', async () => {
const {siteDir, routeBasePath, sidebarPath, pluginId, plugin} =
await loadSite();
const mock = jest
.spyOn(cliDocs, 'cliDocsVersionCommand')
.mockImplementation();
.mockImplementation(async () => {});
const cli = new commander.Command();
// @ts-expect-error: in actual usage, we pass the static commander instead
// of the new command
@@ -540,25 +521,14 @@ describe('versioned website (community)', () => {
mock.mockRestore();
});

test('getPathToWatch', async () => {
it('getPathToWatch', async () => {
const {siteDir, plugin} = await loadSite();
const pathToWatch = plugin.getPathsToWatch!();
const matchPattern = pathToWatch.map((filepath) =>
posixPath(path.relative(siteDir, filepath)),
);
expect(matchPattern).not.toEqual([]);
expect(matchPattern).toMatchInlineSnapshot(`
Array [
"community_sidebars.json",
"i18n/en/docusaurus-plugin-content-docs-community/current/**/*.{md,mdx}",
"community/**/*.{md,mdx}",
"community/**/_category_.{json,yml,yaml}",
"community_versioned_sidebars/version-1.0.0-sidebars.json",
"i18n/en/docusaurus-plugin-content-docs-community/version-1.0.0/**/*.{md,mdx}",
"community_versioned_docs/version-1.0.0/**/*.{md,mdx}",
"community_versioned_docs/version-1.0.0/**/_category_.{json,yml,yaml}",
]
`);
expect(matchPattern).toMatchSnapshot();
expect(isMatch('community/team.md', matchPattern)).toEqual(true);
expect(
isMatch('community_versioned_docs/version-1.0.0/team.md', matchPattern),
@@ -581,7 +551,7 @@ describe('versioned website (community)', () => {
).toEqual(false);
});

test('content', async () => {
it('content', async () => {
const {plugin, pluginContentDir} = await loadSite();
const content = await plugin.loadContent!();
expect(content.loadedVersions.length).toEqual(2);
@@ -625,20 +595,20 @@ describe('site with doc label', () => {
return {content};
}

test('label in sidebar.json is used', async () => {
it('label in sidebar.json is used', async () => {
const {content} = await loadSite();
const loadedVersion = content.loadedVersions[0];
const sidebarProps = toSidebarsProp(loadedVersion);

expect(sidebarProps.docs[0].label).toBe('Hello One');
expect(sidebarProps.docs[0].label).toEqual('Hello One');
});

test('sidebar_label in doc has higher precedence over label in sidebar.json', async () => {
it('sidebar_label in doc has higher precedence over label in sidebar.json', async () => {
const {content} = await loadSite();
const loadedVersion = content.loadedVersions[0];
const sidebarProps = toSidebarsProp(loadedVersion);

expect(sidebarProps.docs[1].label).toBe('Hello 2 From Doc');
expect(sidebarProps.docs[1].label).toEqual('Hello 2 From Doc');
});
});
@@ -662,14 +632,14 @@ describe('site with full autogenerated sidebar', () => {
return {content, siteDir};
}

test('sidebar is fully autogenerated', async () => {
it('sidebar is fully autogenerated', async () => {
const {content} = await loadSite();
const version = content.loadedVersions[0];

expect(version.sidebars).toMatchSnapshot();
});

test('docs in fully generated sidebar have correct metadata', async () => {
it('docs in fully generated sidebar have correct metadata', async () => {
const {content} = await loadSite();
const version = content.loadedVersions[0];
@@ -720,14 +690,14 @@ describe('site with partial autogenerated sidebars', () => {
return {content, siteDir};
}

test('sidebar is partially autogenerated', async () => {
it('sidebar is partially autogenerated', async () => {
const {content} = await loadSite();
const version = content.loadedVersions[0];

expect(version.sidebars).toMatchSnapshot();
});

test('docs in partially generated sidebar have correct metadata', async () => {
it('docs in partially generated sidebar have correct metadata', async () => {
const {content} = await loadSite();
const version = content.loadedVersions[0];
@@ -773,7 +743,7 @@ describe('site with partial autogenerated sidebars 2 (fix #4638)', () => {
return {content, siteDir};
}

test('sidebar is partially autogenerated', async () => {
it('sidebar is partially autogenerated', async () => {
const {content} = await loadSite();
const version = content.loadedVersions[0];
@@ -800,7 +770,7 @@ describe('site with custom sidebar items generator', () => {
return {content, siteDir};
}

test('sidebarItemsGenerator is called with appropriate data', async () => {
it('sidebarItemsGenerator is called with appropriate data', async () => {
const customSidebarItemsGeneratorMock = jest.fn(
async (_arg: SidebarItemsGeneratorOptionArgs) => [],
);
@@ -830,7 +800,7 @@ describe('site with custom sidebar items generator', () => {
);
});

test('sidebar is autogenerated according to a custom sidebarItemsGenerator', async () => {
it('sidebar is autogenerated according to a custom sidebarItemsGenerator', async () => {
const customSidebarItemsGenerator: SidebarItemsGeneratorOption =
async () => [
{type: 'doc', id: 'API/api-overview'},
@@ -843,7 +813,7 @@ describe('site with custom sidebar items generator', () => {
expect(version.sidebars).toMatchSnapshot();
});

test('sidebarItemsGenerator can wrap/enhance/sort/reverse the default sidebar generator', async () => {
it('sidebarItemsGenerator can wrap/enhance/sort/reverse the default sidebar generator', async () => {
function reverseSidebarItems(items: SidebarItem[]): SidebarItem[] {
const result: SidebarItem[] = items.map((item) => {
if (item.type === 'category') {

@@ -12,12 +12,12 @@ import shell from 'shelljs';

import {getFileLastUpdate} from '../lastUpdate';

describe('lastUpdate', () => {
describe('getFileLastUpdate', () => {
const existingFilePath = path.join(
__dirname,
'__fixtures__/simple-site/docs/hello.md',
);
test('existing test file in repository with Git timestamp', async () => {
it('existing test file in repository with Git timestamp', async () => {
const lastUpdateData = await getFileLastUpdate(existingFilePath);
expect(lastUpdateData).not.toBeNull();
@@ -29,7 +29,7 @@ describe('lastUpdate', () => {
expect(typeof timestamp).toBe('number');
});

test('existing test file with spaces in path', async () => {
it('existing test file with spaces in path', async () => {
const filePathWithSpace = path.join(
__dirname,
'__fixtures__/simple-site/docs/doc with space.md',
@@ -45,8 +45,10 @@ describe('lastUpdate', () => {
expect(typeof timestamp).toBe('number');
});

test('non-existing file', async () => {
const consoleMock = jest.spyOn(console, 'error').mockImplementation();
it('non-existing file', async () => {
const consoleMock = jest
.spyOn(console, 'error')
.mockImplementation(() => {});
const nonExistingFileName = '.nonExisting';
const nonExistingFilePath = path.join(
__dirname,
@@ -63,16 +65,18 @@ describe('lastUpdate', () => {
consoleMock.mockRestore();
});

test('temporary created file that has no git timestamp', async () => {
it('temporary created file that has no git timestamp', async () => {
const tempFilePath = path.join(__dirname, '__fixtures__', '.temp');
await fs.writeFile(tempFilePath, 'Lorem ipsum :)');
await expect(getFileLastUpdate(tempFilePath)).resolves.toBeNull();
await fs.unlink(tempFilePath);
});

test('Git does not exist', async () => {
it('git does not exist', async () => {
const mock = jest.spyOn(shell, 'which').mockImplementationOnce(() => null);
const consoleMock = jest.spyOn(console, 'warn').mockImplementation();
const consoleMock = jest
.spyOn(console, 'warn')
.mockImplementation(() => {});
const lastUpdateData = await getFileLastUpdate(existingFilePath);
expect(lastUpdateData).toBeNull();
expect(consoleMock).toHaveBeenLastCalledWith(

@@ -69,7 +69,7 @@ describe('stripNumberPrefix', () => {
return stripNumberPrefix(str, DefaultNumberPrefixParser);
}

test('should strip number prefix if present', () => {
it('strips number prefix if present', () => {
expect(stripNumberPrefixDefault('1-My Doc')).toEqual('My Doc');
expect(stripNumberPrefixDefault('01-My Doc')).toEqual('My Doc');
expect(stripNumberPrefixDefault('001-My Doc')).toEqual('My Doc');
@@ -111,7 +111,7 @@ describe('stripNumberPrefix', () => {
);
});

test('should not strip number prefix if pattern does not match', () => {
it('does not strip number prefix if pattern does not match', () => {
IgnoredNumberPrefixPatterns.forEach((badPattern) => {
expect(stripNumberPrefixDefault(badPattern)).toEqual(badPattern);
});
@@ -119,7 +119,7 @@ describe('stripNumberPrefix', () => {
});

describe('stripPathNumberPrefix', () => {
test('should strip number prefixes in paths', () => {
it('strips number prefixes in paths', () => {
expect(
stripPathNumberPrefixes(
'0-MyRootFolder0/1 - MySubFolder1/2. MyDeepFolder2/3 _MyDoc3',
@@ -128,7 +128,7 @@ describe('stripPathNumberPrefix', () => {
).toEqual('MyRootFolder0/MySubFolder1/MyDeepFolder2/MyDoc3');
});

test('should strip number prefixes in paths with custom parser', () => {
it('strips number prefixes in paths with custom parser', () => {
function stripPathNumberPrefixCustom(str: string) {
return {
filename: str.substring(1, str.length),
@@ -141,7 +141,7 @@ describe('stripPathNumberPrefix', () => {
).toEqual('aaa/bbb/ccc');
});

test('should strip number prefixes in paths with disabled parser', () => {
it('does not strip number prefixes in paths with disabled parser', () => {
expect(
stripPathNumberPrefixes(
'0-MyRootFolder0/1 - MySubFolder1/2. MyDeepFolder2/3 _MyDoc3',
@@ -152,7 +152,7 @@ describe('stripPathNumberPrefix', () => {
});

describe('DefaultNumberPrefixParser', () => {
test('should extract number prefix if present', () => {
it('extracts number prefix if present', () => {
expect(DefaultNumberPrefixParser('0-My Doc')).toEqual({
filename: 'My Doc',
numberPrefix: 0,
@@ -188,7 +188,7 @@ describe('DefaultNumberPrefixParser', () => {
});
});

test('should not extract number prefix if pattern does not match', () => {
it('does not extract number prefix if pattern does not match', () => {
IgnoredNumberPrefixPatterns.forEach((badPattern) => {
expect(DefaultNumberPrefixParser(badPattern)).toEqual({
filename: badPattern,

@@ -30,13 +30,13 @@ function testValidateOptions(options: Partial<PluginOptions>) {
}

describe('normalizeDocsPluginOptions', () => {
test('should return default options for undefined user options', async () => {
it('returns default options for undefined user options', async () => {
const {value, error} = await OptionsSchema.validate({});
expect(value).toEqual(DEFAULT_OPTIONS);
expect(error).toBe(undefined);
});

test('should accept correctly defined user options', async () => {
it('accepts correctly defined user options', async () => {
const userOptions = {
path: 'my-docs', // Path to data on filesystem, relative to site dir.
routeBasePath: 'my-docs', // URL Route.
@@ -82,7 +82,7 @@ describe('normalizeDocsPluginOptions', () => {
expect(error).toBe(undefined);
});

test('should accept correctly defined remark and rehype plugin options', async () => {
it('accepts correctly defined remark and rehype plugin options', async () => {
const userOptions = {
...DEFAULT_OPTIONS,
beforeDefaultRemarkPlugins: [],
@@ -98,7 +98,7 @@ describe('normalizeDocsPluginOptions', () => {
expect(error).toBe(undefined);
});

test('should accept admonitions false', async () => {
it('accepts admonitions false', async () => {
const admonitionsFalse = {
...DEFAULT_OPTIONS,
admonitions: false,
@@ -108,7 +108,7 @@ describe('normalizeDocsPluginOptions', () => {
expect(error).toBe(undefined);
});

test('should accept numberPrefixParser function', () => {
it('accepts numberPrefixParser function', () => {
function customNumberPrefixParser() {}
expect(
normalizePluginOptions(OptionsSchema, {
@@ -122,7 +122,7 @@ describe('normalizeDocsPluginOptions', () => {
});
});

test('should accept numberPrefixParser false', () => {
it('accepts numberPrefixParser false', () => {
expect(
normalizePluginOptions(OptionsSchema, {
...DEFAULT_OPTIONS,
@@ -135,7 +135,7 @@ describe('normalizeDocsPluginOptions', () => {
});
});

test('should accept numberPrefixParser true', () => {
it('accepts numberPrefixParser true', () => {
expect(
normalizePluginOptions(OptionsSchema, {
...DEFAULT_OPTIONS,
@@ -148,7 +148,7 @@ describe('normalizeDocsPluginOptions', () => {
});
});

test('should reject admonitions true', async () => {
it('rejects admonitions true', async () => {
const admonitionsTrue = {
...DEFAULT_OPTIONS,
admonitions: true,
@@ -159,7 +159,7 @@ describe('normalizeDocsPluginOptions', () => {
);
});

test('should reject invalid remark plugin options', () => {
it('rejects invalid remark plugin options', () => {
expect(() => {
normalizePluginOptions(OptionsSchema, {
remarkPlugins: [[{option1: '42'}, markdownPluginsFunctionStub]],
@@ -169,7 +169,7 @@ describe('normalizeDocsPluginOptions', () => {
);
});

test('should reject invalid rehype plugin options', () => {
it('rejects invalid rehype plugin options', () => {
expect(() => {
normalizePluginOptions(OptionsSchema, {
rehypePlugins: [
@@ -185,7 +185,7 @@ describe('normalizeDocsPluginOptions', () => {
);
});

test('should reject bad path inputs', () => {
it('rejects bad path inputs', () => {
expect(() => {
normalizePluginOptions(OptionsSchema, {
path: 2,
@@ -193,7 +193,7 @@ describe('normalizeDocsPluginOptions', () => {
}).toThrowErrorMatchingInlineSnapshot(`"\\"path\\" must be a string"`);
});

test('should reject bad include inputs', () => {
it('rejects bad include inputs', () => {
expect(() => {
normalizePluginOptions(OptionsSchema, {
include: '**/*.{md,mdx}',
@@ -201,7 +201,7 @@ describe('normalizeDocsPluginOptions', () => {
}).toThrowErrorMatchingInlineSnapshot(`"\\"include\\" must be an array"`);
});

test('should reject bad showLastUpdateTime inputs', () => {
it('rejects bad showLastUpdateTime inputs', () => {
expect(() => {
normalizePluginOptions(OptionsSchema, {
showLastUpdateTime: 'true',
@@ -211,7 +211,7 @@ describe('normalizeDocsPluginOptions', () => {
);
});

test('should reject bad remarkPlugins input', () => {
it('rejects bad remarkPlugins input', () => {
expect(() => {
normalizePluginOptions(OptionsSchema, {
remarkPlugins: 'remark-math',
@@ -221,7 +221,7 @@ describe('normalizeDocsPluginOptions', () => {
);
});

test('should reject bad lastVersion', () => {
it('rejects bad lastVersion', () => {
expect(() => {
normalizePluginOptions(OptionsSchema, {
lastVersion: false,
@@ -231,7 +231,7 @@ describe('normalizeDocsPluginOptions', () => {
);
});

test('should reject bad versions', () => {
it('rejects bad versions', () => {
expect(() => {
normalizePluginOptions(OptionsSchema, {
versions: {
@@ -249,7 +249,7 @@ describe('normalizeDocsPluginOptions', () => {
);
});

test('should handle sidebarCollapsed option inconsistencies', () => {
it('handles sidebarCollapsed option inconsistencies', () => {
expect(
testValidateOptions({
...DEFAULT_OPTIONS,

@@ -14,7 +14,7 @@ describe('toTagDocListProp', () => {

const allTagsPath = '/all/tags';

test('should work', () => {
it('works', () => {
const tag: Tag = {
name: 'tag1',
permalink: '/tag1',

@@ -8,7 +8,7 @@
import getSlug from '../slug';

describe('getSlug', () => {
test('should default to dirname/id', () => {
it('defaults to dirname/id', () => {
expect(
getSlug({
baseID: 'doc',
@@ -25,7 +25,7 @@ describe('getSlug', () => {
).toEqual('/dir/subdir/doc');
});

test('should handle conventional doc indexes', () => {
it('handles conventional doc indexes', () => {
expect(
getSlug({
baseID: 'doc',
@@ -70,7 +70,7 @@ describe('getSlug', () => {
).toEqual('/dir/subdir/');
});

test('should ignore conventional doc index when explicit slug front matter is provided', () => {
it('ignores conventional doc index when explicit slug front matter is provided', () => {
expect(
getSlug({
baseID: 'doc',
@@ -81,7 +81,7 @@ describe('getSlug', () => {
).toEqual('/my/frontMatterSlug');
});

test('can strip dir number prefixes', () => {
it('can strip dir number prefixes', () => {
expect(
getSlug({
baseID: 'doc',
@@ -101,7 +101,7 @@ describe('getSlug', () => {
});

// See https://github.com/facebook/docusaurus/issues/3223
test('should handle special chars in doc path', () => {
it('handles special chars in doc path', () => {
expect(
getSlug({
baseID: 'my dôc',
@@ -111,7 +111,7 @@ describe('getSlug', () => {
).toEqual('/dir with spâce/hey $hello/my dôc');
});

test('should handle current dir', () => {
it('handles current dir', () => {
expect(
getSlug({baseID: 'doc', source: '@site/docs/doc.md', sourceDirName: '.'}),
).toEqual('/doc');
@@ -120,7 +120,7 @@ describe('getSlug', () => {
).toEqual('/doc');
});

test('should resolve absolute slug front matter', () => {
it('resolves absolute slug front matter', () => {
expect(
getSlug({
baseID: 'any',
@@ -147,7 +147,7 @@ describe('getSlug', () => {
).toEqual('/abc/def');
});

test('should resolve relative slug front matter', () => {
it('resolves relative slug front matter', () => {
expect(
getSlug({
baseID: 'any',

@@ -142,20 +142,20 @@ function getSampleTranslationFilesTranslated() {
}

describe('getLoadedContentTranslationFiles', () => {
test('should return translation files matching snapshot', async () => {
it('returns translation files', async () => {
expect(getSampleTranslationFiles()).toMatchSnapshot();
});
});

describe('translateLoadedContent', () => {
test('should not translate anything if translation files are untranslated', () => {
it('does not translate anything if translation files are untranslated', () => {
const translationFiles = getSampleTranslationFiles();
expect(
translateLoadedContent(SampleLoadedContent, translationFiles),
).toEqual(SampleLoadedContent);
});

test('should return translated loaded content matching snapshot', () => {
it('returns translated loaded content', () => {
const translationFiles = getSampleTranslationFilesTranslated();
expect(
translateLoadedContent(SampleLoadedContent, translationFiles),

@@ -27,7 +27,7 @@ const DefaultI18N: I18n = {
};

describe('version paths', () => {
test('getVersionsFilePath', () => {
it('getVersionsFilePath', () => {
expect(getVersionsFilePath('someSiteDir', DEFAULT_PLUGIN_ID)).toBe(
`someSiteDir${path.sep}versions.json`,
);
@@ -36,7 +36,7 @@ describe('version paths', () => {
);
});

test('getVersionedDocsDirPath', () => {
it('getVersionedDocsDirPath', () => {
expect(getVersionedDocsDirPath('someSiteDir', DEFAULT_PLUGIN_ID)).toBe(
`someSiteDir${path.sep}versioned_docs`,
);
@@ -45,7 +45,7 @@ describe('version paths', () => {
);
});

test('getVersionedSidebarsDirPath', () => {
it('getVersionedSidebarsDirPath', () => {
expect(getVersionedSidebarsDirPath('someSiteDir', DEFAULT_PLUGIN_ID)).toBe(
`someSiteDir${path.sep}versioned_sidebars`,
);
@@ -90,7 +90,7 @@ describe('simple site', () => {
return {simpleSiteDir, defaultOptions, defaultContext, vCurrent};
}

test('readVersionsMetadata simple site', async () => {
it('readVersionsMetadata simple site', async () => {
const {defaultOptions, defaultContext, vCurrent} = await loadSite();

const versionsMetadata = await readVersionsMetadata({
@@ -101,7 +101,7 @@ describe('simple site', () => {
expect(versionsMetadata).toEqual([vCurrent]);
});

test('readVersionsMetadata simple site with base url', async () => {
it('readVersionsMetadata simple site with base url', async () => {
const {defaultOptions, defaultContext, vCurrent} = await loadSite();

const versionsMetadata = await readVersionsMetadata({
@@ -121,7 +121,7 @@ describe('simple site', () => {
]);
});

test('readVersionsMetadata simple site with current version config', async () => {
it('readVersionsMetadata simple site with current version config', async () => {
const {defaultOptions, defaultContext, vCurrent} = await loadSite();

const versionsMetadata = await readVersionsMetadata({
@@ -154,7 +154,7 @@ describe('simple site', () => {
]);
});

test('readVersionsMetadata simple site with unknown lastVersion should throw', async () => {
it('readVersionsMetadata simple site with unknown lastVersion should throw', async () => {
const {defaultOptions, defaultContext} = await loadSite();

await expect(
@@ -167,7 +167,7 @@ describe('simple site', () => {
);
});

test('readVersionsMetadata simple site with unknown version configurations should throw', async () => {
it('readVersionsMetadata simple site with unknown version configurations should throw', async () => {
const {defaultOptions, defaultContext} = await loadSite();

await expect(
@@ -187,7 +187,7 @@ describe('simple site', () => {
);
});

test('readVersionsMetadata simple site with disableVersioning while single version should throw', async () => {
it('readVersionsMetadata simple site with disableVersioning while single version should throw', async () => {
const {defaultOptions, defaultContext} = await loadSite();

await expect(
@@ -200,7 +200,7 @@ describe('simple site', () => {
);
});

test('readVersionsMetadata simple site without including current version should throw', async () => {
it('readVersionsMetadata simple site without including current version should throw', async () => {
const {defaultOptions, defaultContext} = await loadSite();

await expect(
@@ -325,7 +325,7 @@ describe('versioned site, pluginId=default', () => {
};
}

test('readVersionsMetadata versioned site', async () => {
it('readVersionsMetadata versioned site', async () => {
const {defaultOptions, defaultContext, vCurrent, v101, v100, vwithSlugs} =
await loadSite();
@@ -337,7 +337,7 @@ describe('versioned site, pluginId=default', () => {
expect(versionsMetadata).toEqual([vCurrent, v101, v100, vwithSlugs]);
});

test('readVersionsMetadata versioned site with includeCurrentVersion=false', async () => {
it('readVersionsMetadata versioned site with includeCurrentVersion=false', async () => {
const {defaultOptions, defaultContext, v101, v100, vwithSlugs} =
await loadSite();
@@ -354,7 +354,7 @@ describe('versioned site, pluginId=default', () => {
]);
});

test('readVersionsMetadata versioned site with version options', async () => {
it('readVersionsMetadata versioned site with version options', async () => {
const {defaultOptions, defaultContext, vCurrent, v101, v100, vwithSlugs} =
await loadSite();
@@ -408,7 +408,7 @@ describe('versioned site, pluginId=default', () => {
]);
});

test('readVersionsMetadata versioned site with editUrl', async () => {
it('readVersionsMetadata versioned site with editUrl', async () => {
const {defaultOptions, defaultContext, vCurrent, v101, v100, vwithSlugs} =
await loadSite();
@@ -452,7 +452,7 @@ describe('versioned site, pluginId=default', () => {
]);
});

test('readVersionsMetadata versioned site with editUrl and editCurrentVersion=true', async () => {
it('readVersionsMetadata versioned site with editUrl and editCurrentVersion=true', async () => {
const {defaultOptions, defaultContext, vCurrent, v101, v100, vwithSlugs} =
await loadSite();
@@ -497,7 +497,7 @@ describe('versioned site, pluginId=default', () => {
]);
});

test('readVersionsMetadata versioned site with onlyIncludeVersions option', async () => {
it('readVersionsMetadata versioned site with onlyIncludeVersions option', async () => {
const {defaultOptions, defaultContext, v101, vwithSlugs} = await loadSite();

const versionsMetadata = await readVersionsMetadata({
@@ -512,7 +512,7 @@ describe('versioned site, pluginId=default', () => {
expect(versionsMetadata).toEqual([v101, vwithSlugs]);
});

test('readVersionsMetadata versioned site with disableVersioning', async () => {
it('readVersionsMetadata versioned site with disableVersioning', async () => {
const {defaultOptions, defaultContext, vCurrent} = await loadSite();

const versionsMetadata = await readVersionsMetadata({
@@ -533,7 +533,7 @@ describe('versioned site, pluginId=default', () => {
]);
});

test('readVersionsMetadata versioned site with all versions disabled', async () => {
it('readVersionsMetadata versioned site with all versions disabled', async () => {
const {defaultOptions, defaultContext} = await loadSite();

await expect(
@@ -550,7 +550,7 @@ describe('versioned site, pluginId=default', () => {
);
});

test('readVersionsMetadata versioned site with empty onlyIncludeVersions', async () => {
it('readVersionsMetadata versioned site with empty onlyIncludeVersions', async () => {
const {defaultOptions, defaultContext} = await loadSite();

await expect(
@@ -566,7 +566,7 @@ describe('versioned site, pluginId=default', () => {
);
});

test('readVersionsMetadata versioned site with unknown versions in onlyIncludeVersions', async () => {
it('readVersionsMetadata versioned site with unknown versions in onlyIncludeVersions', async () => {
const {defaultOptions, defaultContext} = await loadSite();

await expect(
@@ -582,7 +582,7 @@ describe('versioned site, pluginId=default', () => {
);
});

test('readVersionsMetadata versioned site with lastVersion not in onlyIncludeVersions', async () => {
it('readVersionsMetadata versioned site with lastVersion not in onlyIncludeVersions', async () => {
const {defaultOptions, defaultContext} = await loadSite();

await expect(
@@ -599,7 +599,7 @@ describe('versioned site, pluginId=default', () => {
);
});

test('readVersionsMetadata versioned site with invalid versions.json file', async () => {
it('readVersionsMetadata versioned site with invalid versions.json file', async () => {
const {defaultOptions, defaultContext} = await loadSite();

const mock = jest.spyOn(JSON, 'parse').mockImplementationOnce(() => ({
@@ -681,7 +681,7 @@ describe('versioned site, pluginId=community', () => {
return {versionedSiteDir, defaultOptions, defaultContext, vCurrent, v100};
}

test('readVersionsMetadata versioned site (community)', async () => {
it('readVersionsMetadata versioned site (community)', async () => {
const {defaultOptions, defaultContext, vCurrent, v100} = await loadSite();

const versionsMetadata = await readVersionsMetadata({
@@ -692,7 +692,7 @@ describe('versioned site, pluginId=community', () => {
expect(versionsMetadata).toEqual([vCurrent, v100]);
});

test('readVersionsMetadata versioned site (community) with includeCurrentVersion=false', async () => {
it('readVersionsMetadata versioned site (community) with includeCurrentVersion=false', async () => {
const {defaultOptions, defaultContext, v100} = await loadSite();

const versionsMetadata = await readVersionsMetadata({
@@ -706,7 +706,7 @@ describe('versioned site, pluginId=community', () => {
]);
});

test('readVersionsMetadata versioned site (community) with disableVersioning', async () => {
it('readVersionsMetadata versioned site (community) with disableVersioning', async () => {
const {defaultOptions, defaultContext, vCurrent} = await loadSite();

const versionsMetadata = await readVersionsMetadata({
@@ -727,7 +727,7 @@ describe('versioned site, pluginId=community', () => {
]);
});

test('readVersionsMetadata versioned site (community) with all versions disabled', async () => {
it('readVersionsMetadata versioned site (community) with all versions disabled', async () => {
const {defaultOptions, defaultContext} = await loadSite();

await expect(

@@ -20,7 +20,7 @@ import type {
import _ from 'lodash';

describe('docsClientUtils', () => {
test('getActivePlugin', () => {
it('getActivePlugin', () => {
const data: Record<string, GlobalPluginData> = {
pluginIosId: {
path: '/ios',
@@ -91,7 +91,7 @@ describe('docsClientUtils', () => {
).toEqual('pluginAndroidId');
});

test('getLatestVersion', () => {
it('getLatestVersion', () => {
const versions: GlobalVersion[] = [
{
name: 'version1',
@@ -127,7 +127,7 @@ describe('docsClientUtils', () => {
).toEqual(versions[1]);
});

test('getActiveVersion', () => {
it('getActiveVersion', () => {
const data: GlobalPluginData = {
path: 'docs',
versions: [
@@ -175,7 +175,7 @@ describe('docsClientUtils', () => {
);
});

test('getActiveDocContext', () => {
it('getActiveDocContext', () => {
const versionNext: GlobalVersion = {
name: 'next',
label: 'next',
@@ -304,7 +304,7 @@ describe('docsClientUtils', () => {
});
});

test('getDocVersionSuggestions', () => {
it('getDocVersionSuggestions', () => {
const versionNext: GlobalVersion = {
name: 'next',
label: 'next',

@@ -1,6 +1,17 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`transform nothing 1`] = `
exports[`linkify transforms absolute links in versioned docs 1`] = `
"### Existing Docs

- [doc1](/docs/1.0.0/subdir/doc1)

### With hash

- [doc2](/docs/1.0.0/doc2#existing-docs)
"
`;

exports[`linkify transforms nothing with no links 1`] = `
"# Don't transform any link here

![docusaurus](https://docusaurus.io/img/docusaurus.png)
@@ -17,41 +28,7 @@ exports[`transform nothing 1`] = `
"
`;

exports[`transform relative links 1`] = `
"### Relative linking

- [doc1](/docs/doc2)
"
`;

exports[`transform to correct links 1`] = `
"### Existing Docs

- [doc1](/docs/doc1)
- [doc2](/docs/doc2)
- [doc3](/docs/subdir/doc3)

## Repeating Docs

- [doc1](/docs/doc1)
- [doc2](/docs/doc2)

- [doc-localized](/fr/doc-localized)
"
`;

exports[`transforms absolute links in versioned docs 1`] = `
"### Existing Docs

- [doc1](/docs/1.0.0/subdir/doc1)

### With hash

- [doc2](/docs/1.0.0/doc2#existing-docs)
"
`;

exports[`transforms reference links 1`] = `
exports[`linkify transforms reference links 1`] = `
"### Existing Docs

- [doc1][doc1]
@@ -74,9 +51,32 @@ exports[`transforms reference links 1`] = `
"
`;

exports[`transforms relative links in versioned docs 1`] = `
exports[`linkify transforms relative links 1`] = `
"### Relative linking

- [doc1](/docs/doc2)
"
`;

exports[`linkify transforms relative links in versioned docs 1`] = `
"### Relative linking

- [doc1](/docs/1.0.0/doc2)
"
`;

exports[`linkify transforms to correct links 1`] = `
"### Existing Docs

- [doc1](/docs/doc1)
- [doc2](/docs/doc2)
- [doc3](/docs/subdir/doc3)

## Repeating Docs

- [doc1](/docs/doc1)
- [doc2](/docs/doc2)

- [doc-localized](/fr/doc-localized)
"
`;

@@ -100,95 +100,97 @@ const transform = async (
return [content, transformedContent];
};

test('transform nothing', async () => {
const doc1 = path.join(versionCurrent.contentPath, 'doc1.md');
const [content, transformedContent] = await transform(doc1);
expect(transformedContent).toMatchSnapshot();
expect(content).toEqual(transformedContent);
});

test('transform to correct links', async () => {
const doc2 = path.join(versionCurrent.contentPath, 'doc2.md');
const [content, transformedContent] = await transform(doc2);
expect(transformedContent).toMatchSnapshot();
expect(transformedContent).toContain('](/docs/doc1');
expect(transformedContent).toContain('](/docs/doc2');
expect(transformedContent).toContain('](/docs/subdir/doc3');
expect(transformedContent).toContain('](/fr/doc-localized');
expect(transformedContent).not.toContain('](doc1.md)');
expect(transformedContent).not.toContain('](./doc2.md)');
expect(transformedContent).not.toContain('](subdir/doc3.md)');
expect(transformedContent).not.toContain('](/doc-localized');
expect(content).not.toEqual(transformedContent);
});

test('transform relative links', async () => {
const doc3 = path.join(versionCurrent.contentPath, 'subdir', 'doc3.md');

const [content, transformedContent] = await transform(doc3);
expect(transformedContent).toMatchSnapshot();
expect(transformedContent).toContain('](/docs/doc2');
expect(transformedContent).not.toContain('](../doc2.md)');
expect(content).not.toEqual(transformedContent);
});

test('transforms reference links', async () => {
const doc4 = path.join(versionCurrent.contentPath, 'doc4.md');
const [content, transformedContent] = await transform(doc4);
expect(transformedContent).toMatchSnapshot();
expect(transformedContent).toContain('[doc1]: /docs/doc1');
expect(transformedContent).toContain('[doc2]: /docs/doc2');
expect(transformedContent).not.toContain('[doc1]: doc1.md');
expect(transformedContent).not.toContain('[doc2]: ./doc2.md');
expect(content).not.toEqual(transformedContent);
});

test('report broken markdown links', async () => {
const doc5 = path.join(versionCurrent.contentPath, 'doc5.md');
const onBrokenMarkdownLink = jest.fn();
const [content, transformedContent] = await transform(doc5, {
onBrokenMarkdownLink,
describe('linkify', () => {
it('transforms nothing with no links', async () => {
const doc1 = path.join(versionCurrent.contentPath, 'doc1.md');
const [content, transformedContent] = await transform(doc1);
expect(transformedContent).toMatchSnapshot();
expect(content).toEqual(transformedContent);
});
expect(transformedContent).toEqual(content);
expect(onBrokenMarkdownLink).toHaveBeenCalledTimes(4);
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(1, {
filePath: doc5,
link: 'docNotExist1.md',
contentPaths: versionCurrent,
} as BrokenMarkdownLink);
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(2, {
filePath: doc5,
link: './docNotExist2.mdx',
contentPaths: versionCurrent,
} as BrokenMarkdownLink);
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(3, {
filePath: doc5,
link: '../docNotExist3.mdx',
contentPaths: versionCurrent,
} as BrokenMarkdownLink);
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(4, {
filePath: doc5,
link: './subdir/docNotExist4.md',
contentPaths: versionCurrent,
} as BrokenMarkdownLink);
});

test('transforms absolute links in versioned docs', async () => {
const doc2 = path.join(version100.contentPath, 'doc2.md');
const [content, transformedContent] = await transform(doc2);
expect(transformedContent).toMatchSnapshot();
expect(transformedContent).toContain('](/docs/1.0.0/subdir/doc1');
expect(transformedContent).toContain('](/docs/1.0.0/doc2#existing-docs');
expect(transformedContent).not.toContain('](subdir/doc1.md)');
expect(transformedContent).not.toContain('](doc2.md#existing-docs)');
expect(content).not.toEqual(transformedContent);
});
it('transforms to correct links', async () => {
const doc2 = path.join(versionCurrent.contentPath, 'doc2.md');
const [content, transformedContent] = await transform(doc2);
expect(transformedContent).toMatchSnapshot();
expect(transformedContent).toContain('](/docs/doc1');
expect(transformedContent).toContain('](/docs/doc2');
expect(transformedContent).toContain('](/docs/subdir/doc3');
expect(transformedContent).toContain('](/fr/doc-localized');
expect(transformedContent).not.toContain('](doc1.md)');
expect(transformedContent).not.toContain('](./doc2.md)');
expect(transformedContent).not.toContain('](subdir/doc3.md)');
expect(transformedContent).not.toContain('](/doc-localized');
expect(content).not.toEqual(transformedContent);
});

test('transforms relative links in versioned docs', async () => {
const doc1 = path.join(version100.contentPath, 'subdir', 'doc1.md');
const [content, transformedContent] = await transform(doc1);
expect(transformedContent).toMatchSnapshot();
expect(transformedContent).toContain('](/docs/1.0.0/doc2');
expect(transformedContent).not.toContain('](../doc2.md)');
expect(content).not.toEqual(transformedContent);
it('transforms relative links', async () => {
const doc3 = path.join(versionCurrent.contentPath, 'subdir', 'doc3.md');

const [content, transformedContent] = await transform(doc3);
expect(transformedContent).toMatchSnapshot();
expect(transformedContent).toContain('](/docs/doc2');
expect(transformedContent).not.toContain('](../doc2.md)');
expect(content).not.toEqual(transformedContent);
});

it('transforms reference links', async () => {
const doc4 = path.join(versionCurrent.contentPath, 'doc4.md');
const [content, transformedContent] = await transform(doc4);
expect(transformedContent).toMatchSnapshot();
expect(transformedContent).toContain('[doc1]: /docs/doc1');
expect(transformedContent).toContain('[doc2]: /docs/doc2');
expect(transformedContent).not.toContain('[doc1]: doc1.md');
expect(transformedContent).not.toContain('[doc2]: ./doc2.md');
expect(content).not.toEqual(transformedContent);
});

it('reports broken markdown links', async () => {
const doc5 = path.join(versionCurrent.contentPath, 'doc5.md');
const onBrokenMarkdownLink = jest.fn();
const [content, transformedContent] = await transform(doc5, {
onBrokenMarkdownLink,
});
expect(transformedContent).toEqual(content);
expect(onBrokenMarkdownLink).toHaveBeenCalledTimes(4);
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(1, {
filePath: doc5,
link: 'docNotExist1.md',
contentPaths: versionCurrent,
} as BrokenMarkdownLink);
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(2, {
filePath: doc5,
link: './docNotExist2.mdx',
contentPaths: versionCurrent,
} as BrokenMarkdownLink);
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(3, {
filePath: doc5,
link: '../docNotExist3.mdx',
contentPaths: versionCurrent,
} as BrokenMarkdownLink);
expect(onBrokenMarkdownLink).toHaveBeenNthCalledWith(4, {
filePath: doc5,
link: './subdir/docNotExist4.md',
contentPaths: versionCurrent,
} as BrokenMarkdownLink);
});

it('transforms absolute links in versioned docs', async () => {
const doc2 = path.join(version100.contentPath, 'doc2.md');
const [content, transformedContent] = await transform(doc2);
expect(transformedContent).toMatchSnapshot();
expect(transformedContent).toContain('](/docs/1.0.0/subdir/doc1');
expect(transformedContent).toContain('](/docs/1.0.0/doc2#existing-docs');
expect(transformedContent).not.toContain('](subdir/doc1.md)');
expect(transformedContent).not.toContain('](doc2.md#existing-docs)');
expect(content).not.toEqual(transformedContent);
});

it('transforms relative links in versioned docs', async () => {
const doc1 = path.join(version100.contentPath, 'subdir', 'doc1.md');
const [content, transformedContent] = await transform(doc1);
expect(transformedContent).toMatchSnapshot();
expect(transformedContent).toContain('](/docs/1.0.0/doc2');
expect(transformedContent).not.toContain('](../doc2.md)');
expect(content).not.toEqual(transformedContent);
});
});

@ -0,0 +1,78 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`postProcess corrects collapsed state inconsistencies 1`] = `
|
||||
Object {
|
||||
"sidebar": Array [
|
||||
Object {
|
||||
"collapsed": false,
|
||||
"collapsible": false,
|
||||
"items": Array [
|
||||
Object {
|
||||
"id": "foo",
|
||||
"type": "doc",
|
||||
},
|
||||
],
|
||||
"label": "Category",
|
||||
"link": undefined,
|
||||
"type": "category",
|
||||
},
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`postProcess corrects collapsed state inconsistencies 2`] = `
|
||||
Object {
|
||||
"sidebar": Array [
|
||||
Object {
|
||||
"collapsed": false,
|
||||
"collapsible": false,
|
||||
"items": Array [
|
||||
Object {
|
||||
"id": "foo",
|
||||
"type": "doc",
|
||||
},
|
||||
],
|
||||
"label": "Category",
|
||||
"link": undefined,
|
||||
"type": "category",
|
||||
},
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`postProcess corrects collapsed state inconsistencies 3`] = `
|
||||
Object {
|
||||
"sidebar": Array [
|
||||
Object {
|
||||
"collapsed": false,
|
||||
"collapsible": false,
|
||||
"items": Array [
|
||||
Object {
|
||||
"id": "foo",
|
||||
"type": "doc",
|
||||
},
|
||||
],
|
||||
"label": "Category",
|
||||
"link": undefined,
|
||||
"type": "category",
|
||||
},
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`postProcess transforms category without subitems 1`] = `
|
||||
Object {
|
||||
"sidebar": Array [
|
||||
Object {
|
||||
"href": "version/generated/permalink",
|
||||
"label": "Category",
|
||||
"type": "link",
|
||||
},
|
||||
Object {
|
||||
"id": "doc ID",
|
||||
"label": "Category 2",
|
||||
"type": "doc",
|
||||
},
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
|
@ -36,7 +36,7 @@ describe('DefaultSidebarItemsGenerator', () => {
|
|||
});
|
||||
}
|
||||
|
||||
test('generates empty sidebar slice when no docs and emit a warning', async () => {
|
||||
it('generates empty sidebar slice when no docs and emit a warning', async () => {
|
||||
const consoleWarn = jest.spyOn(console, 'warn');
|
||||
const sidebarSlice = await testDefaultSidebarItemsGenerator({
|
||||
docs: [],
|
||||
|
|
@ -49,7 +49,7 @@ describe('DefaultSidebarItemsGenerator', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('generates simple flat sidebar', async () => {
|
||||
it('generates simple flat sidebar', async () => {
|
||||
const sidebarSlice = await DefaultSidebarItemsGenerator({
|
||||
numberPrefixParser: DefaultNumberPrefixParser,
|
||||
item: {
|
||||
|
|
@ -108,7 +108,7 @@ describe('DefaultSidebarItemsGenerator', () => {
|
|||
expect(sidebarSlice).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('generates complex nested sidebar', async () => {
|
||||
it('generates complex nested sidebar', async () => {
|
||||
const sidebarSlice = await DefaultSidebarItemsGenerator({
|
||||
numberPrefixParser: DefaultNumberPrefixParser,
|
||||
isCategoryIndex,
|
||||
|
|
@ -212,7 +212,7 @@ describe('DefaultSidebarItemsGenerator', () => {
|
|||
expect(sidebarSlice).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('generates subfolder sidebar', async () => {
|
||||
it('generates subfolder sidebar', async () => {
|
||||
// Ensure that category metadata file is correctly read
|
||||
// fix edge case found in https://github.com/facebook/docusaurus/issues/4638
|
||||
const sidebarSlice = await DefaultSidebarItemsGenerator({
|
||||
|
|
@ -308,7 +308,7 @@ describe('DefaultSidebarItemsGenerator', () => {
|
|||
expect(sidebarSlice).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('uses explicit link over the index/readme.{md,mdx} naming convention', async () => {
|
||||
it('uses explicit link over the index/readme.{md,mdx} naming convention', async () => {
|
||||
const sidebarSlice = await DefaultSidebarItemsGenerator({
|
||||
numberPrefixParser: DefaultNumberPrefixParser,
|
||||
item: {
|
||||
|
|
@ -379,7 +379,7 @@ describe('DefaultSidebarItemsGenerator', () => {
|
|||
expect(sidebarSlice).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('respects custom isCategoryIndex', async () => {
|
||||
it('respects custom isCategoryIndex', async () => {
|
||||
const sidebarSlice = await DefaultSidebarItemsGenerator({
|
||||
numberPrefixParser: DefaultNumberPrefixParser,
|
||||
isCategoryIndex({fileName, directories}) {
|
||||
|
|
@ -462,7 +462,7 @@ describe('DefaultSidebarItemsGenerator', () => {
|
|||
expect(sidebarSlice).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('throws for unknown index link', async () => {
|
||||
it('throws for unknown index link', async () => {
|
||||
const generateSidebar = () =>
|
||||
DefaultSidebarItemsGenerator({
|
||||
numberPrefixParser: DefaultNumberPrefixParser,
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@
|
|||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
|
||||
import {jest} from '@jest/globals';
|
||||
import path from 'path';
|
||||
import {loadSidebars, DisabledSidebars} from '../index';
|
||||
import type {SidebarProcessorParams} from '../types';
|
||||
|
|
@ -30,19 +31,19 @@ describe('loadSidebars', () => {
|
|||
categoryLabelSlugger: null,
|
||||
sidebarOptions: {sidebarCollapsed: true, sidebarCollapsible: true},
|
||||
};
|
||||
test('sidebars with known sidebar item type', async () => {
|
||||
it('sidebars with known sidebar item type', async () => {
|
||||
const sidebarPath = path.join(fixtureDir, 'sidebars.json');
|
||||
const result = await loadSidebars(sidebarPath, params);
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('sidebars with deep level of category', async () => {
|
||||
it('sidebars with deep level of category', async () => {
|
||||
const sidebarPath = path.join(fixtureDir, 'sidebars-category.js');
|
||||
const result = await loadSidebars(sidebarPath, params);
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('sidebars shorthand and longform lead to exact same sidebar', async () => {
|
||||
it('sidebars shorthand and longform lead to exact same sidebar', async () => {
|
||||
const sidebarPath1 = path.join(fixtureDir, 'sidebars-category.js');
|
||||
const sidebarPath2 = path.join(
|
||||
fixtureDir,
|
||||
|
|
@ -53,7 +54,7 @@ describe('loadSidebars', () => {
|
|||
expect(sidebar1).toEqual(sidebar2);
|
||||
});
|
||||
|
||||
test('sidebars with category but category.items is not an array', async () => {
|
||||
it('sidebars with category but category.items is not an array', async () => {
|
||||
const sidebarPath = path.join(
|
||||
fixtureDir,
|
||||
'sidebars-category-wrong-items.json',
|
||||
|
|
@ -65,7 +66,7 @@ describe('loadSidebars', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('sidebars with first level not a category', async () => {
|
||||
it('sidebars with first level not a category', async () => {
|
||||
const sidebarPath = path.join(
|
||||
fixtureDir,
|
||||
'sidebars-first-level-not-category.js',
|
||||
|
|
@ -74,35 +75,35 @@ describe('loadSidebars', () => {
|
|||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('sidebars link', async () => {
|
||||
it('sidebars link', async () => {
|
||||
const sidebarPath = path.join(fixtureDir, 'sidebars-link.json');
|
||||
const result = await loadSidebars(sidebarPath, params);
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('unexisting path', async () => {
|
||||
it('unexisting path', async () => {
|
||||
await expect(loadSidebars('badpath', params)).resolves.toEqual(
|
||||
DisabledSidebars,
|
||||
);
|
||||
});
|
||||
|
||||
test('undefined path', async () => {
|
||||
it('undefined path', async () => {
|
||||
await expect(loadSidebars(undefined, params)).resolves.toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('literal false path', async () => {
|
||||
it('literal false path', async () => {
|
||||
await expect(loadSidebars(false, params)).resolves.toEqual(
|
||||
DisabledSidebars,
|
||||
);
|
||||
});
|
||||
|
||||
test('sidebars with category.collapsed property', async () => {
|
||||
it('sidebars with category.collapsed property', async () => {
|
||||
const sidebarPath = path.join(fixtureDir, 'sidebars-collapsed.json');
|
||||
const result = await loadSidebars(sidebarPath, params);
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('sidebars with category.collapsed property at first level', async () => {
|
||||
it('sidebars with category.collapsed property at first level', async () => {
|
||||
const sidebarPath = path.join(
|
||||
fixtureDir,
|
||||
'sidebars-collapsed-first-level.json',
|
||||
|
|
@ -111,7 +112,7 @@ describe('loadSidebars', () => {
|
|||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('duplicate category metadata files', async () => {
|
||||
it('duplicate category metadata files', async () => {
|
||||
const sidebarPath = path.join(
|
||||
fixtureDir,
|
||||
'sidebars-collapsed-first-level.json',
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@
|
|||
import {normalizeSidebars} from '../normalization';
|
||||
|
||||
describe('normalization', () => {
|
||||
test('normalizes shorthands', () => {
|
||||
it('normalizes shorthands', () => {
|
||||
expect(
|
||||
normalizeSidebars({
|
||||
sidebar: {
|
||||
|
|
@ -37,7 +37,7 @@ describe('normalization', () => {
|
|||
}),
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
test('rejects some invalid cases', () => {
|
||||
it('rejects some invalid cases', () => {
|
||||
expect(() =>
|
||||
normalizeSidebars({
|
||||
sidebar: {
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@
|
|||
import {postProcessSidebars} from '../postProcessor';
|
||||
|
||||
describe('postProcess', () => {
|
||||
test('transforms category without subitems', () => {
|
||||
it('transforms category without subitems', () => {
|
||||
const processedSidebar = postProcessSidebars(
|
||||
{
|
||||
sidebar: [
|
||||
|
|
@ -38,22 +38,7 @@ describe('postProcess', () => {
|
|||
},
|
||||
);
|
||||
|
||||
expect(processedSidebar).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"sidebar": Array [
|
||||
Object {
|
||||
"href": "version/generated/permalink",
|
||||
"label": "Category",
|
||||
"type": "link",
|
||||
},
|
||||
Object {
|
||||
"id": "doc ID",
|
||||
"label": "Category 2",
|
||||
"type": "doc",
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
expect(processedSidebar).toMatchSnapshot();
|
||||
|
||||
expect(() => {
|
||||
postProcessSidebars(
|
||||
|
|
@ -76,7 +61,7 @@ describe('postProcess', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('corrects collapsed state inconsistencies', () => {
|
||||
it('corrects collapsed state inconsistencies', () => {
|
||||
expect(
|
||||
postProcessSidebars(
|
||||
{
|
||||
|
|
@ -96,25 +81,7 @@ describe('postProcess', () => {
|
|||
version: {versionPath: 'version'},
|
||||
},
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"sidebar": Array [
|
||||
Object {
|
||||
"collapsed": false,
|
||||
"collapsible": false,
|
||||
"items": Array [
|
||||
Object {
|
||||
"id": "foo",
|
||||
"type": "doc",
|
||||
},
|
||||
],
|
||||
"label": "Category",
|
||||
"link": undefined,
|
||||
"type": "category",
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
|
||||
expect(
|
||||
postProcessSidebars(
|
||||
|
|
@ -134,25 +101,7 @@ describe('postProcess', () => {
|
|||
version: {versionPath: 'version'},
|
||||
},
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"sidebar": Array [
|
||||
Object {
|
||||
"collapsed": false,
|
||||
"collapsible": false,
|
||||
"items": Array [
|
||||
Object {
|
||||
"id": "foo",
|
||||
"type": "doc",
|
||||
},
|
||||
],
|
||||
"label": "Category",
|
||||
"link": undefined,
|
||||
"type": "category",
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
|
||||
expect(
|
||||
postProcessSidebars(
|
||||
|
|
@ -171,24 +120,6 @@ describe('postProcess', () => {
|
|||
version: {versionPath: 'version'},
|
||||
},
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"sidebar": Array [
|
||||
Object {
|
||||
"collapsed": false,
|
||||
"collapsible": false,
|
||||
"items": Array [
|
||||
Object {
|
||||
"id": "foo",
|
||||
"type": "doc",
|
||||
},
|
||||
],
|
||||
"label": "Category",
|
||||
"link": undefined,
|
||||
"type": "category",
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -66,7 +66,7 @@ describe('processSidebars', () => {
|
|||
});
|
||||
}
|
||||
|
||||
test('let sidebars without autogenerated items untouched', async () => {
|
||||
it('leaves sidebars without autogenerated items untouched', async () => {
|
||||
const unprocessedSidebars: NormalizedSidebars = {
|
||||
someSidebar: [
|
||||
{type: 'doc', id: 'doc1'},
|
||||
|
|
@ -96,7 +96,7 @@ describe('processSidebars', () => {
|
|||
expect(processedSidebar).toEqual(unprocessedSidebars);
|
||||
});
|
||||
|
||||
test('replace autogenerated items by generated sidebars slices', async () => {
|
||||
it('replaces autogenerated items by generated sidebars slices', async () => {
|
||||
const unprocessedSidebars: NormalizedSidebars = {
|
||||
someSidebar: [
|
||||
{type: 'doc', id: 'doc1'},
|
||||
|
|
@ -199,7 +199,7 @@ describe('processSidebars', () => {
|
|||
} as ProcessedSidebars);
|
||||
});
|
||||
|
||||
test('ensure generated items are normalized', async () => {
|
||||
it('ensures generated items are normalized', async () => {
|
||||
const sidebarSliceContainingCategoryGeneratedIndex: NormalizedSidebar = [
|
||||
{
|
||||
type: 'category',
|
||||
|
|
|
|||
|
|
@ -131,11 +131,11 @@ describe('createSidebarsUtils', () => {
|
|||
getFirstLink,
|
||||
} = createSidebarsUtils(sidebars);
|
||||
|
||||
test('getFirstDocIdOfFirstSidebar', async () => {
|
||||
it('getFirstDocIdOfFirstSidebar', async () => {
|
||||
expect(getFirstDocIdOfFirstSidebar()).toEqual('doc1');
|
||||
});
|
||||
|
||||
test('getSidebarNameByDocId', async () => {
|
||||
it('getSidebarNameByDocId', async () => {
|
||||
expect(getSidebarNameByDocId('doc1')).toEqual('sidebar1');
|
||||
expect(getSidebarNameByDocId('doc2')).toEqual('sidebar1');
|
||||
expect(getSidebarNameByDocId('doc3')).toEqual('sidebar2');
|
||||
|
|
@ -146,7 +146,7 @@ describe('createSidebarsUtils', () => {
|
|||
expect(getSidebarNameByDocId('unknown_id')).toEqual(undefined);
|
||||
});
|
||||
|
||||
test('getDocNavigation', async () => {
|
||||
it('getDocNavigation', async () => {
|
||||
expect(getDocNavigation('doc1', 'doc1', undefined)).toEqual({
|
||||
sidebarName: 'sidebar1',
|
||||
previous: undefined,
|
||||
|
|
@ -226,7 +226,7 @@ describe('createSidebarsUtils', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('getCategoryGeneratedIndexNavigation', async () => {
|
||||
it('getCategoryGeneratedIndexNavigation', async () => {
|
||||
expect(
|
||||
getCategoryGeneratedIndexNavigation('/s3-subcategory-index-permalink'),
|
||||
).toMatchObject({
|
||||
|
|
@ -256,7 +256,7 @@ describe('createSidebarsUtils', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('getCategoryGeneratedIndexList', async () => {
|
||||
it('getCategoryGeneratedIndexList', async () => {
|
||||
expect(getCategoryGeneratedIndexList()).toMatchObject([
|
||||
{
|
||||
type: 'category',
|
||||
|
|
@ -273,7 +273,7 @@ describe('createSidebarsUtils', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
test('getFirstLink', () => {
|
||||
it('getFirstLink', () => {
|
||||
expect(getFirstLink('sidebar1')).toEqual({
|
||||
id: 'doc1',
|
||||
type: 'doc',
|
||||
|
|
@ -298,7 +298,7 @@ describe('createSidebarsUtils', () => {
|
|||
});
|
||||
|
||||
describe('collectSidebarDocItems', () => {
|
||||
test('can collect docs', async () => {
|
||||
it('can collect docs', async () => {
|
||||
const sidebar: Sidebar = [
|
||||
{
|
||||
type: 'category',
|
||||
|
|
@ -354,7 +354,7 @@ describe('collectSidebarDocItems', () => {
|
|||
});
|
||||
|
||||
describe('collectSidebarCategories', () => {
|
||||
test('can collect categories', async () => {
|
||||
it('can collect categories', async () => {
|
||||
const sidebar: Sidebar = [
|
||||
{
|
||||
type: 'category',
|
||||
|
|
@ -412,7 +412,7 @@ describe('collectSidebarCategories', () => {
|
|||
});
|
||||
|
||||
describe('collectSidebarLinks', () => {
|
||||
test('can collect links', async () => {
|
||||
it('can collect links', async () => {
|
||||
const sidebar: Sidebar = [
|
||||
{
|
||||
type: 'category',
|
||||
|
|
@ -450,7 +450,7 @@ describe('collectSidebarLinks', () => {
|
|||
});
|
||||
|
||||
describe('collectSidebarsDocIds', () => {
|
||||
test('can collect sidebars doc items', async () => {
|
||||
it('can collect sidebars doc items', async () => {
|
||||
const sidebar1: Sidebar = [
|
||||
{
|
||||
type: 'category',
|
||||
|
|
@ -496,7 +496,7 @@ describe('collectSidebarsDocIds', () => {
|
|||
});
|
||||
|
||||
describe('transformSidebarItems', () => {
|
||||
test('can transform sidebar items', async () => {
|
||||
it('can transform sidebar items', async () => {
|
||||
const sidebar: Sidebar = [
|
||||
{
|
||||
type: 'category',
|
||||
|
|
@ -606,7 +606,7 @@ describe('toDocNavigationLink', () => {
|
|||
return data as DocMetadataBase;
|
||||
}
|
||||
|
||||
test('with no front matter', () => {
|
||||
it('with no front matter', () => {
|
||||
expect(
|
||||
toDocNavigationLink(
|
||||
testDoc({
|
||||
|
|
@ -621,7 +621,7 @@ describe('toDocNavigationLink', () => {
|
|||
} as DocNavLink);
|
||||
});
|
||||
|
||||
test('with pagination_label front matter', () => {
|
||||
it('with pagination_label front matter', () => {
|
||||
expect(
|
||||
toDocNavigationLink(
|
||||
testDoc({
|
||||
|
|
@ -638,7 +638,7 @@ describe('toDocNavigationLink', () => {
|
|||
} as DocNavLink);
|
||||
});
|
||||
|
||||
test('with sidebar_label front matter', () => {
|
||||
it('with sidebar_label front matter', () => {
|
||||
expect(
|
||||
toDocNavigationLink(
|
||||
testDoc({
|
||||
|
|
@ -655,7 +655,7 @@ describe('toDocNavigationLink', () => {
|
|||
} as DocNavLink);
|
||||
});
|
||||
|
||||
test('with pagination_label + sidebar_label front matter', () => {
|
||||
it('with pagination_label + sidebar_label front matter', () => {
|
||||
expect(
|
||||
toDocNavigationLink(
|
||||
testDoc({
|
||||
|
|
@ -691,7 +691,7 @@ describe('toNavigationLink', () => {
|
|||
}),
|
||||
};
|
||||
|
||||
test('with doc items', () => {
|
||||
it('with doc items', () => {
|
||||
expect(toNavigationLink({type: 'doc', id: 'doc1'}, docsById)).toEqual(
|
||||
toDocNavigationLink(docsById.doc1),
|
||||
);
|
||||
|
|
@ -705,7 +705,7 @@ describe('toNavigationLink', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('with category item and doc link', () => {
|
||||
it('with category item and doc link', () => {
|
||||
expect(
|
||||
toNavigationLink(
|
||||
{
|
||||
|
|
@ -742,7 +742,7 @@ describe('toNavigationLink', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('with category item and generated-index link', () => {
|
||||
it('with category item and generated-index link', () => {
|
||||
expect(
|
||||
toNavigationLink(
|
||||
{
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ import {validateSidebars, validateCategoryMetadataFile} from '../validation';
|
|||
import type {SidebarsConfig, CategoryMetadataFile} from '../types';
|
||||
|
||||
describe('validateSidebars', () => {
|
||||
test('throw for bad value', async () => {
|
||||
it('throw for bad value', async () => {
|
||||
expect(() => validateSidebars({sidebar: [{type: 42}]}))
|
||||
.toThrowErrorMatchingInlineSnapshot(`
|
||||
"{
|
||||
|
|
@ -21,12 +21,12 @@ describe('validateSidebars', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('accept empty object', async () => {
|
||||
it('accept empty object', async () => {
|
||||
const sidebars: SidebarsConfig = {};
|
||||
validateSidebars(sidebars);
|
||||
});
|
||||
|
||||
test('accept valid values', async () => {
|
||||
it('accept valid values', async () => {
|
||||
const sidebars: SidebarsConfig = {
|
||||
sidebar1: [
|
||||
{type: 'doc', id: 'doc1'},
|
||||
|
|
@ -41,17 +41,19 @@ describe('validateSidebars', () => {
|
|||
validateSidebars(sidebars);
|
||||
});
|
||||
|
||||
test('sidebar category wrong label', () => {
|
||||
expect(() =>
|
||||
validateSidebars({
|
||||
docs: [
|
||||
{
|
||||
type: 'category',
|
||||
label: true,
|
||||
items: [{type: 'doc', id: 'doc1'}],
|
||||
},
|
||||
],
|
||||
}),
|
||||
it('sidebar category wrong label', () => {
|
||||
expect(
|
||||
() =>
|
||||
validateSidebars({
|
||||
docs: [
|
||||
{
|
||||
type: 'category',
|
||||
label: true,
|
||||
items: [{type: 'doc', id: 'doc1'}],
|
||||
},
|
||||
],
|
||||
}),
|
||||
// eslint-disable-next-line jest/no-large-snapshots
|
||||
).toThrowErrorMatchingInlineSnapshot(`
|
||||
"{
|
||||
\\"type\\": \\"category\\",
|
||||
|
|
@ -68,7 +70,7 @@ describe('validateSidebars', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('sidebars link wrong label', () => {
|
||||
it('sidebars link wrong label', () => {
|
||||
expect(() =>
|
||||
validateSidebars({
|
||||
docs: [
|
||||
|
|
@ -90,7 +92,7 @@ describe('validateSidebars', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('sidebars link wrong href', () => {
|
||||
it('sidebars link wrong href', () => {
|
||||
expect(() =>
|
||||
validateSidebars({
|
||||
docs: [
|
||||
|
|
@ -114,7 +116,7 @@ describe('validateSidebars', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('sidebars with unknown sidebar item type', () => {
|
||||
it('sidebars with unknown sidebar item type', () => {
|
||||
expect(() =>
|
||||
validateSidebars({
|
||||
docs: [
|
||||
|
|
@ -133,7 +135,7 @@ describe('validateSidebars', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('sidebars category missing items', () => {
|
||||
it('sidebars category missing items', () => {
|
||||
expect(() =>
|
||||
validateSidebars({
|
||||
docs: [
|
||||
|
|
@ -159,7 +161,7 @@ describe('validateSidebars', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('sidebars category wrong field', () => {
|
||||
it('sidebars category wrong field', () => {
|
||||
expect(() =>
|
||||
validateSidebars({
|
||||
docs: [
|
||||
|
|
@ -188,7 +190,7 @@ describe('validateSidebars', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('sidebar category wrong items', () => {
|
||||
it('sidebar category wrong items', () => {
|
||||
expect(() =>
|
||||
validateSidebars({
|
||||
docs: {
|
||||
|
|
@ -204,7 +206,7 @@ describe('validateSidebars', () => {
|
|||
).toThrowErrorMatchingInlineSnapshot(`"sidebar.forEach is not a function"`);
|
||||
});
|
||||
|
||||
test('sidebars item doc but id is not a string', async () => {
|
||||
it('sidebars item doc but id is not a string', async () => {
|
||||
expect(() =>
|
||||
validateSidebars({
|
||||
docs: [
|
||||
|
|
@ -226,7 +228,7 @@ describe('validateSidebars', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('HTML type requires a value', () => {
|
||||
it('hTML type requires a value', () => {
|
||||
const sidebars: SidebarsConfig = {
|
||||
sidebar1: [
|
||||
{
|
||||
|
|
@ -246,7 +248,7 @@ describe('validateSidebars', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
test('HTML type accepts valid values', () => {
|
||||
it('hTML type accepts valid values', () => {
|
||||
const sidebars: SidebarsConfig = {
|
||||
sidebar1: [
|
||||
{
|
||||
|
|
@ -264,7 +266,7 @@ describe('validateSidebars', () => {
|
|||
describe('validateCategoryMetadataFile', () => {
|
||||
// TODO add more tests
|
||||
|
||||
test('throw for bad value', async () => {
|
||||
it('throw for bad value', async () => {
|
||||
expect(() =>
|
||||
validateCategoryMetadataFile(42),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
|
|
@ -272,12 +274,12 @@ describe('validateCategoryMetadataFile', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('accept empty object', async () => {
|
||||
it('accept empty object', async () => {
|
||||
const content: CategoryMetadataFile = {};
|
||||
expect(validateCategoryMetadataFile(content)).toEqual(content);
|
||||
});
|
||||
|
||||
test('accept valid values', async () => {
|
||||
it('accept valid values', async () => {
|
||||
const content: CategoryMetadataFile = {
|
||||
className: 'className',
|
||||
label: 'Category Label',
|
||||
|
|
@ -294,7 +296,7 @@ describe('validateCategoryMetadataFile', () => {
|
|||
expect(validateCategoryMetadataFile(content)).toEqual(content);
|
||||
});
|
||||
|
||||
test('rejects permalink', async () => {
|
||||
it('rejects permalink', async () => {
|
||||
const content: CategoryMetadataFile = {
|
||||
className: 'className',
|
||||
label: 'Category Label',
|
||||
|
|
|
|||
|
|
@ -0,0 +1,105 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`docusaurus-plugin-content-pages loads simple pages 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"permalink": "/",
|
||||
"source": "@site/src/pages/index.js",
|
||||
"type": "jsx",
|
||||
},
|
||||
Object {
|
||||
"permalink": "/typescript",
|
||||
"source": "@site/src/pages/typescript.tsx",
|
||||
"type": "jsx",
|
||||
},
|
||||
Object {
|
||||
"description": "Markdown index page",
|
||||
"frontMatter": Object {},
|
||||
"permalink": "/hello/",
|
||||
"source": "@site/src/pages/hello/index.md",
|
||||
"title": "Index",
|
||||
"type": "mdx",
|
||||
},
|
||||
Object {
|
||||
"description": "my mdx page",
|
||||
"frontMatter": Object {
|
||||
"description": "my mdx page",
|
||||
"title": "mdx page",
|
||||
},
|
||||
"permalink": "/hello/mdxPage",
|
||||
"source": "@site/src/pages/hello/mdxPage.mdx",
|
||||
"title": "mdx page",
|
||||
"type": "mdx",
|
||||
},
|
||||
Object {
|
||||
"permalink": "/hello/translatedJs",
|
||||
"source": "@site/src/pages/hello/translatedJs.js",
|
||||
"type": "jsx",
|
||||
},
|
||||
Object {
|
||||
"description": "translated markdown page",
|
||||
"frontMatter": Object {},
|
||||
"permalink": "/hello/translatedMd",
|
||||
"source": "@site/src/pages/hello/translatedMd.md",
|
||||
"title": undefined,
|
||||
"type": "mdx",
|
||||
},
|
||||
Object {
|
||||
"permalink": "/hello/world",
|
||||
"source": "@site/src/pages/hello/world.js",
|
||||
"type": "jsx",
|
||||
},
|
||||
]
|
||||
`;
|
||||
|
||||
exports[`docusaurus-plugin-content-pages loads simple pages with french translations 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"permalink": "/",
|
||||
"source": "@site/src/pages/index.js",
|
||||
"type": "jsx",
|
||||
},
|
||||
Object {
|
||||
"permalink": "/typescript",
|
||||
"source": "@site/src/pages/typescript.tsx",
|
||||
"type": "jsx",
|
||||
},
|
||||
Object {
|
||||
"description": "Markdown index page",
|
||||
"frontMatter": Object {},
|
||||
"permalink": "/hello/",
|
||||
"source": "@site/src/pages/hello/index.md",
|
||||
"title": "Index",
|
||||
"type": "mdx",
|
||||
},
|
||||
Object {
|
||||
"description": "my mdx page",
|
||||
"frontMatter": Object {
|
||||
"description": "my mdx page",
|
||||
"title": "mdx page",
|
||||
},
|
||||
"permalink": "/hello/mdxPage",
|
||||
"source": "@site/src/pages/hello/mdxPage.mdx",
|
||||
"title": "mdx page",
|
||||
"type": "mdx",
|
||||
},
|
||||
Object {
|
||||
"permalink": "/hello/translatedJs",
|
||||
"source": "@site/i18n/fr/docusaurus-plugin-content-pages/hello/translatedJs.js",
|
||||
"type": "jsx",
|
||||
},
|
||||
Object {
|
||||
"description": "translated markdown page (fr)",
|
||||
"frontMatter": Object {},
|
||||
"permalink": "/hello/translatedMd",
|
||||
"source": "@site/i18n/fr/docusaurus-plugin-content-pages/hello/translatedMd.md",
|
||||
"title": undefined,
|
||||
"type": "mdx",
|
||||
},
|
||||
Object {
|
||||
"permalink": "/hello/world",
|
||||
"source": "@site/src/pages/hello/world.js",
|
||||
"type": "jsx",
|
||||
},
|
||||
]
|
||||
`;
|
||||
|
|
@ -12,83 +12,23 @@ import pluginContentPages from '../index';
|
|||
import {PluginOptionSchema} from '../pluginOptionSchema';
|
||||
|
||||
describe('docusaurus-plugin-content-pages', () => {
|
||||
test('simple pages', async () => {
|
||||
it('loads simple pages', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const context = await loadContext(siteDir);
|
||||
const pluginPath = 'src/pages';
|
||||
const plugin = await pluginContentPages(
|
||||
context,
|
||||
PluginOptionSchema.validate({
|
||||
path: pluginPath,
|
||||
path: 'src/pages',
|
||||
}).value,
|
||||
);
|
||||
const pagesMetadata = await plugin.loadContent?.();
|
||||
const pagesMetadata = await plugin.loadContent!();
|
||||
|
||||
expect(pagesMetadata).toEqual([
|
||||
{
|
||||
type: 'jsx',
|
||||
permalink: '/',
|
||||
source: path.posix.join('@site', pluginPath, 'index.js'),
|
||||
},
|
||||
{
|
||||
type: 'jsx',
|
||||
permalink: '/typescript',
|
||||
source: path.posix.join('@site', pluginPath, 'typescript.tsx'),
|
||||
},
|
||||
{
|
||||
type: 'mdx',
|
||||
permalink: '/hello/',
|
||||
source: path.posix.join('@site', pluginPath, 'hello', 'index.md'),
|
||||
description: 'Markdown index page',
|
||||
frontMatter: {},
|
||||
title: 'Index',
|
||||
},
|
||||
{
|
||||
type: 'mdx',
|
||||
permalink: '/hello/mdxPage',
|
||||
source: path.posix.join('@site', pluginPath, 'hello', 'mdxPage.mdx'),
|
||||
description: 'my mdx page',
|
||||
title: 'mdx page',
|
||||
frontMatter: {
|
||||
description: 'my mdx page',
|
||||
title: 'mdx page',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'jsx',
|
||||
permalink: '/hello/translatedJs',
|
||||
source: path.posix.join(
|
||||
'@site',
|
||||
pluginPath,
|
||||
'hello',
|
||||
'translatedJs.js',
|
||||
),
|
||||
},
|
||||
{
|
||||
type: 'mdx',
|
||||
permalink: '/hello/translatedMd',
|
||||
source: path.posix.join(
|
||||
'@site',
|
||||
pluginPath,
|
||||
'hello',
|
||||
'translatedMd.md',
|
||||
),
|
||||
description: 'translated markdown page',
|
||||
frontMatter: {},
|
||||
title: undefined,
|
||||
},
|
||||
{
|
||||
type: 'jsx',
|
||||
permalink: '/hello/world',
|
||||
source: path.posix.join('@site', pluginPath, 'hello', 'world.js'),
|
||||
},
|
||||
]);
|
||||
expect(pagesMetadata).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('simple pages with french translations', async () => {
|
||||
it('loads simple pages with french translations', async () => {
|
||||
const siteDir = path.join(__dirname, '__fixtures__', 'website');
|
||||
const context = await loadContext(siteDir);
|
||||
const pluginPath = 'src/pages';
|
||||
const plugin = await pluginContentPages(
|
||||
{
|
||||
...context,
|
||||
|
|
@ -98,66 +38,11 @@ describe('docusaurus-plugin-content-pages', () => {
|
|||
},
|
||||
},
|
||||
PluginOptionSchema.validate({
|
||||
path: pluginPath,
|
||||
path: 'src/pages',
|
||||
}).value,
|
||||
);
|
||||
const pagesMetadata = await plugin.loadContent?.();
|
||||
const pagesMetadata = await plugin.loadContent!();
|
||||
|
||||
const frTranslationsPath = path.posix.join(
|
||||
'@site',
|
||||
'i18n',
|
||||
'fr',
|
||||
'docusaurus-plugin-content-pages',
|
||||
);
|
||||
|
||||
expect(pagesMetadata).toEqual([
|
||||
{
|
||||
type: 'jsx',
|
||||
permalink: '/',
|
||||
source: path.posix.join('@site', pluginPath, 'index.js'),
|
||||
},
|
||||
{
|
||||
type: 'jsx',
|
||||
permalink: '/typescript',
|
||||
source: path.posix.join('@site', pluginPath, 'typescript.tsx'),
|
||||
},
|
||||
{
|
||||
type: 'mdx',
|
||||
permalink: '/hello/',
|
||||
source: path.posix.join('@site', pluginPath, 'hello', 'index.md'),
|
||||
description: 'Markdown index page',
|
||||
frontMatter: {},
|
||||
title: 'Index',
|
||||
},
|
||||
{
|
||||
type: 'mdx',
|
||||
permalink: '/hello/mdxPage',
|
||||
source: path.posix.join('@site', pluginPath, 'hello', 'mdxPage.mdx'),
|
||||
description: 'my mdx page',
|
||||
title: 'mdx page',
|
||||
frontMatter: {
|
||||
description: 'my mdx page',
|
||||
title: 'mdx page',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'jsx',
|
||||
permalink: '/hello/translatedJs',
|
||||
source: path.posix.join(frTranslationsPath, 'hello', 'translatedJs.js'),
|
||||
},
|
||||
{
|
||||
type: 'mdx',
|
||||
permalink: '/hello/translatedMd',
|
||||
source: path.posix.join(frTranslationsPath, 'hello', 'translatedMd.md'),
|
||||
description: 'translated markdown page (fr)',
|
||||
frontMatter: {},
|
||||
title: undefined,
|
||||
},
|
||||
{
|
||||
type: 'jsx',
|
||||
permalink: '/hello/world',
|
||||
source: path.posix.join('@site', pluginPath, 'hello', 'world.js'),
|
||||
},
|
||||
]);
|
||||
expect(pagesMetadata).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -22,17 +22,17 @@ function normalizePluginOptions(
|
|||
}
|
||||
|
||||
describe('normalizePagesPluginOptions', () => {
|
||||
test('should return default options for undefined user options', () => {
|
||||
it('returns default options for undefined user options', () => {
|
||||
const value = normalizePluginOptions({});
|
||||
expect(value).toEqual(DEFAULT_OPTIONS);
|
||||
});
|
||||
|
||||
test('should fill in default options for partially defined user options', () => {
|
||||
it('fills in default options for partially defined user options', () => {
|
||||
const value = normalizePluginOptions({path: 'src/pages'});
|
||||
expect(value).toEqual(DEFAULT_OPTIONS);
|
||||
});
|
||||
|
||||
test('should accept correctly defined user options', () => {
|
||||
it('accepts correctly defined user options', () => {
|
||||
const userOptions = {
|
||||
path: 'src/my-pages',
|
||||
routeBasePath: 'my-pages',
|
||||
|
|
@ -43,7 +43,7 @@ describe('normalizePagesPluginOptions', () => {
|
|||
expect(value).toEqual({...DEFAULT_OPTIONS, ...userOptions});
|
||||
});
|
||||
|
||||
test('should reject bad path inputs', () => {
|
||||
it('rejects bad path inputs', () => {
|
||||
expect(() => {
|
||||
normalizePluginOptions({
|
||||
// @ts-expect-error: bad attribute
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ import type {DocusaurusConfig} from '@docusaurus/types';
|
|||
import {EnumChangefreq} from 'sitemap';
|
||||
|
||||
describe('createSitemap', () => {
|
||||
test('simple site', async () => {
|
||||
it('simple site', async () => {
|
||||
const sitemap = await createSitemap(
|
||||
{
|
||||
url: 'https://example.com',
|
||||
|
|
@ -26,14 +26,14 @@ describe('createSitemap', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('empty site', () =>
|
||||
it('empty site', () =>
|
||||
expect(async () => {
|
||||
await createSitemap({} as DocusaurusConfig, [], {});
|
||||
}).rejects.toThrow(
|
||||
'URL in docusaurus.config.js cannot be empty/undefined.',
|
||||
));
|
||||
|
||||
test('exclusion of 404 page', async () => {
|
||||
it('exclusion of 404 page', async () => {
|
||||
const sitemap = await createSitemap(
|
||||
{
|
||||
url: 'https://example.com',
|
||||
|
|
@ -47,7 +47,7 @@ describe('createSitemap', () => {
|
|||
expect(sitemap).not.toContain('404');
|
||||
});
|
||||
|
||||
test('keep trailing slash unchanged', async () => {
|
||||
it('keep trailing slash unchanged', async () => {
|
||||
const sitemap = await createSitemap(
|
||||
{
|
||||
url: 'https://example.com',
|
||||
|
|
@ -66,7 +66,7 @@ describe('createSitemap', () => {
|
|||
expect(sitemap).toContain('<loc>https://example.com/nested/test2/</loc>');
|
||||
});
|
||||
|
||||
test('add trailing slash', async () => {
|
||||
it('add trailing slash', async () => {
|
||||
const sitemap = await createSitemap(
|
||||
{
|
||||
url: 'https://example.com',
|
||||
|
|
@ -85,7 +85,7 @@ describe('createSitemap', () => {
|
|||
expect(sitemap).toContain('<loc>https://example.com/nested/test2/</loc>');
|
||||
});
|
||||
|
||||
test('remove trailing slash', async () => {
|
||||
it('remove trailing slash', async () => {
|
||||
const sitemap = await createSitemap(
|
||||
{
|
||||
url: 'https://example.com',
|
||||
|
|
|
|||
|
|
@ -19,21 +19,21 @@ function normalizePluginOptions(options) {
|
|||
}
|
||||
|
||||
describe('normalizeSitemapPluginOptions', () => {
|
||||
test('should return default values for empty user options', async () => {
|
||||
const {value} = await PluginOptionSchema.validate({});
|
||||
it('returns default values for empty user options', () => {
|
||||
const {value} = PluginOptionSchema.validate({});
|
||||
expect(value).toEqual(DEFAULT_OPTIONS);
|
||||
});
|
||||
|
||||
test('should accept correctly defined user options', async () => {
|
||||
it('accepts correctly defined user options', () => {
|
||||
const userOptions = {
|
||||
changefreq: 'yearly',
|
||||
priority: 0.9,
|
||||
};
|
||||
const {value} = await PluginOptionSchema.validate(userOptions);
|
||||
const {value} = PluginOptionSchema.validate(userOptions);
|
||||
expect(value).toEqual(userOptions);
|
||||
});
|
||||
|
||||
test('should reject out-of-range priority inputs', () => {
|
||||
it('rejects out-of-range priority inputs', () => {
|
||||
expect(() => {
|
||||
normalizePluginOptions({
|
||||
priority: 2,
|
||||
|
|
@ -43,7 +43,7 @@ describe('normalizeSitemapPluginOptions', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should reject bad changefreq inputs', () => {
|
||||
it('rejects bad changefreq inputs', () => {
|
||||
expect(() => {
|
||||
normalizePluginOptions({
|
||||
changefreq: 'annually',
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`npm2yarn plugin test: already imported tabs components above are not re-imported 1`] = `
|
||||
exports[`npm2yarn plugin does not re-import tabs components when already imported above 1`] = `
|
||||
"import Tabs from '@theme/Tabs';
|
||||
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
|
@ -24,7 +24,7 @@ import TabItem from '@theme/TabItem';
|
|||
"
|
||||
`;
|
||||
|
||||
exports[`npm2yarn plugin test: already imported tabs components below are not re-imported 1`] = `
|
||||
exports[`npm2yarn plugin tdoes not re-import tabs components when already imported below 1`] = `
|
||||
"<Tabs>
|
||||
<TabItem value=\\"npm\\">
|
||||
|
||||
|
|
@ -48,7 +48,7 @@ import TabItem from '@theme/TabItem';
|
|||
"
|
||||
`;
|
||||
|
||||
exports[`npm2yarn plugin test: installation file 1`] = `
|
||||
exports[`npm2yarn plugin works on installation file 1`] = `
|
||||
"import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
|
|
@ -71,22 +71,7 @@ import TabItem from '@theme/TabItem';
|
|||
"
|
||||
`;
|
||||
|
||||
exports[`npm2yarn plugin test: language was not set 1`] = `
|
||||
"\`\`\`npm2yarn
|
||||
npm install --save docusaurus-plugin-name
|
||||
\`\`\`
|
||||
|
||||
\`\`\`bash
|
||||
npm install --save docusaurus-plugin-name
|
||||
\`\`\`
|
||||
|
||||
\`\`\`shell
|
||||
npm install --save docusaurus-plugin-name
|
||||
\`\`\`
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`npm2yarn plugin test: plugin file 1`] = `
|
||||
exports[`npm2yarn plugin works on plugin file 1`] = `
|
||||
"import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
|
|
@ -112,3 +97,18 @@ yarn add docusaurus-plugin-name
|
|||
</Tabs>
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`npm2yarn plugin works when language is not set 1`] = `
|
||||
"\`\`\`npm2yarn
|
||||
npm install --save docusaurus-plugin-name
|
||||
\`\`\`
|
||||
|
||||
\`\`\`bash
|
||||
npm install --save docusaurus-plugin-name
|
||||
\`\`\`
|
||||
|
||||
\`\`\`shell
|
||||
npm install --save docusaurus-plugin-name
|
||||
\`\`\`
|
||||
"
|
||||
`;
|
||||
|
|
|
|||
|
|
@ -25,31 +25,31 @@ const processFixture = async (name: string, options?: {sync?: boolean}) => {
|
|||
};
|
||||
|
||||
describe('npm2yarn plugin', () => {
|
||||
test('test: installation file', async () => {
|
||||
it('works on installation file', async () => {
|
||||
const result = await processFixture('installation');
|
||||
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('test: plugin file', async () => {
|
||||
it('works on plugin file', async () => {
|
||||
const result = await processFixture('plugin');
|
||||
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('test: language was not set', async () => {
|
||||
it('works when language is not set', async () => {
|
||||
const result = await processFixture('syntax-not-properly-set');
|
||||
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('test: already imported tabs components above are not re-imported', async () => {
|
||||
it('does not re-import tabs components when already imported above', async () => {
|
||||
const result = await processFixture('import-tabs-above');
|
||||
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('test: already imported tabs components below are not re-imported', async () => {
|
||||
it('tdoes not re-import tabs components when already imported below', async () => {
|
||||
const result = await processFixture('import-tabs-below');
|
||||
|
||||
expect(result).toMatchSnapshot();
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`getTranslationFiles should return translation files matching snapshot 1`] = `
|
||||
exports[`getTranslationFiles returns translation files matching snapshot 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"content": Object {
|
||||
|
|
@ -55,7 +55,7 @@ Array [
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`getTranslationFiles should return translation files matching snapshot 2`] = `
|
||||
exports[`getTranslationFiles returns translation files matching snapshot 2`] = `
|
||||
Array [
|
||||
Object {
|
||||
"content": Object {
|
||||
|
|
@ -98,7 +98,7 @@ Array [
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`translateThemeConfig should return translated themeConfig matching snapshot 1`] = `
|
||||
exports[`translateThemeConfig returns translated themeConfig 1`] = `
|
||||
Object {
|
||||
"announcementBar": Object {},
|
||||
"colorMode": Object {},
|
||||
|
|
|
|||
|
|
@ -79,7 +79,7 @@ function getSampleTranslationFilesTranslated(themeConfig: ThemeConfig) {
|
|||
}
|
||||
|
||||
describe('getTranslationFiles', () => {
|
||||
test('should return translation files matching snapshot', () => {
|
||||
it('returns translation files matching snapshot', () => {
|
||||
expect(getSampleTranslationFiles(ThemeConfigSample)).toMatchSnapshot();
|
||||
expect(
|
||||
getSampleTranslationFiles(ThemeConfigSampleSimpleFooter),
|
||||
|
|
@ -88,7 +88,7 @@ describe('getTranslationFiles', () => {
|
|||
});
|
||||
|
||||
describe('translateThemeConfig', () => {
|
||||
test('should not translate anything if translation files are untranslated', () => {
|
||||
it('does not translate anything if translation files are untranslated', () => {
|
||||
expect(
|
||||
translateThemeConfig({
|
||||
themeConfig: ThemeConfigSample,
|
||||
|
|
@ -97,7 +97,7 @@ describe('translateThemeConfig', () => {
|
|||
).toEqual(ThemeConfigSample);
|
||||
});
|
||||
|
||||
test('should return translated themeConfig matching snapshot', () => {
|
||||
it('returns translated themeConfig', () => {
|
||||
expect(
|
||||
translateThemeConfig({
|
||||
themeConfig: ThemeConfigSample,
|
||||
|
|
@ -117,17 +117,17 @@ describe('getTranslationFiles and translateThemeConfig isomorphism', () => {
|
|||
expect(translatedThemeConfig).toEqual(themeConfig);
|
||||
}
|
||||
|
||||
test('should be verified for sample', () => {
|
||||
it('is verified for sample', () => {
|
||||
verifyIsomorphism(ThemeConfigSample);
|
||||
});
|
||||
|
||||
test('should be verified for sample with simple footer', () => {
|
||||
it('is verified for sample with simple footer', () => {
|
||||
verifyIsomorphism(ThemeConfigSampleSimpleFooter);
|
||||
});
|
||||
|
||||
// undefined footer should not make the translation code crash
|
||||
// See https://github.com/facebook/docusaurus/issues/3936
|
||||
test('should be verified for sample without footer', () => {
|
||||
it('is verified for sample without footer', () => {
|
||||
verifyIsomorphism({...ThemeConfigSample, footer: undefined});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ function testOk(partialThemeConfig: Record<string, unknown>) {
|
|||
}
|
||||
|
||||
describe('themeConfig', () => {
|
||||
test('should accept valid theme config', () => {
|
||||
it('accepts valid theme config', () => {
|
||||
const userConfig = {
|
||||
prism: {
|
||||
theme,
|
||||
|
|
@ -101,7 +101,7 @@ describe('themeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('should allow possible types of navbar items', () => {
|
||||
it('allows possible types of navbar items', () => {
|
||||
const config = {
|
||||
navbar: {
|
||||
items: [
|
||||
|
|
@ -199,7 +199,7 @@ describe('themeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('should reject unknown navbar item type', () => {
|
||||
it('rejects unknown navbar item type', () => {
|
||||
const config = {
|
||||
navbar: {
|
||||
items: [
|
||||
|
|
@ -216,7 +216,7 @@ describe('themeConfig', () => {
|
|||
).toThrowErrorMatchingInlineSnapshot(`"Bad navbar item type joke"`);
|
||||
});
|
||||
|
||||
test('should reject nested dropdowns', () => {
|
||||
it('rejects nested dropdowns', () => {
|
||||
const config = {
|
||||
navbar: {
|
||||
items: [
|
||||
|
|
@ -243,7 +243,7 @@ describe('themeConfig', () => {
|
|||
).toThrowErrorMatchingInlineSnapshot(`"Nested dropdowns are not allowed"`);
|
||||
});
|
||||
|
||||
test('should reject nested dropdowns 2', () => {
|
||||
it('rejects nested dropdowns 2', () => {
|
||||
const config = {
|
||||
navbar: {
|
||||
items: [
|
||||
|
|
@ -260,7 +260,7 @@ describe('themeConfig', () => {
|
|||
).toThrowErrorMatchingInlineSnapshot(`"Nested dropdowns are not allowed"`);
|
||||
});
|
||||
|
||||
test('should reject position attribute within dropdown', () => {
|
||||
it('rejects position attribute within dropdown', () => {
|
||||
const config = {
|
||||
navbar: {
|
||||
items: [
|
||||
|
|
@ -285,7 +285,7 @@ describe('themeConfig', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should give friendly error when href and to coexist', () => {
|
||||
it('gives friendly error when href and to coexist', () => {
|
||||
const config = {
|
||||
navbar: {
|
||||
items: [
|
||||
|
|
@ -305,7 +305,7 @@ describe('themeConfig', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should allow empty alt tags for the logo image in the header', () => {
|
||||
it('allows empty alt tags for the logo image in the header', () => {
|
||||
const altTagConfig = {
|
||||
navbar: {
|
||||
logo: {
|
||||
|
|
@ -324,7 +324,7 @@ describe('themeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('should allow empty alt tags for the logo image in the footer', () => {
|
||||
it('allows empty alt tags for the logo image in the footer', () => {
|
||||
const partialConfig = {
|
||||
footer: {
|
||||
logo: {
|
||||
|
|
@ -344,7 +344,7 @@ describe('themeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('should allow simple links in footer', () => {
|
||||
it('allows simple links in footer', () => {
|
||||
const partialConfig = {
|
||||
footer: {
|
||||
links: [
|
||||
|
|
@ -378,7 +378,7 @@ describe('themeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('should allow footer column with no title', () => {
|
||||
it('allows footer column with no title', () => {
|
||||
const partialConfig = {
|
||||
footer: {
|
||||
links: [
|
||||
|
|
@ -414,7 +414,7 @@ describe('themeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('should reject mix of simple and multi-column links in footer', () => {
|
||||
it('rejects mix of simple and multi-column links in footer', () => {
|
||||
const partialConfig = {
|
||||
footer: {
|
||||
links: [
|
||||
|
|
@ -442,7 +442,7 @@ describe('themeConfig', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should allow width and height specification for logo', () => {
|
||||
it('allows width and height specification for logo', () => {
|
||||
const altTagConfig = {
|
||||
navbar: {
|
||||
logo: {
|
||||
|
|
@ -463,7 +463,7 @@ describe('themeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('should accept valid prism config', () => {
|
||||
it('accepts valid prism config', () => {
|
||||
const prismConfig = {
|
||||
prism: {
|
||||
additionalLanguages: ['kotlin', 'java'],
|
||||
|
|
@ -476,25 +476,25 @@ describe('themeConfig', () => {
|
|||
});
|
||||
|
||||
describe('customCss config', () => {
|
||||
test('should accept customCss undefined', () => {
|
||||
it('accepts customCss undefined', () => {
|
||||
testOk({
|
||||
customCss: undefined,
|
||||
});
|
||||
});
|
||||
|
||||
test('should accept customCss string', () => {
|
||||
it('accepts customCss string', () => {
|
||||
testOk({
|
||||
customCss: './path/to/cssFile.css',
|
||||
});
|
||||
});
|
||||
|
||||
test('should accept customCss string array', () => {
|
||||
it('accepts customCss string array', () => {
|
||||
testOk({
|
||||
customCss: ['./path/to/cssFile.css', './path/to/cssFile2.css'],
|
||||
});
|
||||
});
|
||||
|
||||
test('should reject customCss number', () => {
|
||||
it('rejects customCss number', () => {
|
||||
expect(() =>
|
||||
testValidateThemeConfig({
|
||||
customCss: 42,
|
||||
|
|
@ -509,7 +509,7 @@ describe('themeConfig', () => {
|
|||
const withDefaultValues = (colorMode) =>
|
||||
_.merge({}, DEFAULT_CONFIG.colorMode, colorMode);
|
||||
|
||||
test('switch config', () => {
|
||||
it('switch config', () => {
|
||||
const colorMode = {
|
||||
switchConfig: {
|
||||
darkIcon: '🌙',
|
||||
|
|
@ -522,7 +522,7 @@ describe('themeConfig', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('max config', () => {
|
||||
it('max config', () => {
|
||||
const colorMode = {
|
||||
defaultMode: 'dark',
|
||||
disableSwitch: false,
|
||||
|
|
@ -534,7 +534,7 @@ describe('themeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('undefined config', () => {
|
||||
it('undefined config', () => {
|
||||
const colorMode = undefined;
|
||||
expect(testValidateThemeConfig({colorMode})).toEqual({
|
||||
...DEFAULT_CONFIG,
|
||||
|
|
@ -542,7 +542,7 @@ describe('themeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('empty config', () => {
|
||||
it('empty config', () => {
|
||||
const colorMode = {};
|
||||
expect(testValidateThemeConfig({colorMode})).toEqual({
|
||||
...DEFAULT_CONFIG,
|
||||
|
|
@ -553,132 +553,132 @@ describe('themeConfig', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('themeConfig tableOfContents', () => {
|
||||
test('toc undefined', () => {
|
||||
const tableOfContents = undefined;
|
||||
expect(testValidateThemeConfig({tableOfContents})).toEqual({
|
||||
...DEFAULT_CONFIG,
|
||||
tableOfContents: {
|
||||
minHeadingLevel: DEFAULT_CONFIG.tableOfContents.minHeadingLevel,
|
||||
maxHeadingLevel: DEFAULT_CONFIG.tableOfContents.maxHeadingLevel,
|
||||
},
|
||||
describe('tableOfContents', () => {
|
||||
it('accepts undefined', () => {
|
||||
const tableOfContents = undefined;
|
||||
expect(testValidateThemeConfig({tableOfContents})).toEqual({
|
||||
...DEFAULT_CONFIG,
|
||||
tableOfContents: {
|
||||
minHeadingLevel: DEFAULT_CONFIG.tableOfContents.minHeadingLevel,
|
||||
maxHeadingLevel: DEFAULT_CONFIG.tableOfContents.maxHeadingLevel,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('toc empty', () => {
|
||||
const tableOfContents = {};
|
||||
expect(testValidateThemeConfig({tableOfContents})).toEqual({
|
||||
...DEFAULT_CONFIG,
|
||||
tableOfContents: {
|
||||
minHeadingLevel: DEFAULT_CONFIG.tableOfContents.minHeadingLevel,
|
||||
maxHeadingLevel: DEFAULT_CONFIG.tableOfContents.maxHeadingLevel,
|
||||
},
|
||||
it('accepts empty', () => {
|
||||
const tableOfContents = {};
|
||||
expect(testValidateThemeConfig({tableOfContents})).toEqual({
|
||||
...DEFAULT_CONFIG,
|
||||
tableOfContents: {
|
||||
minHeadingLevel: DEFAULT_CONFIG.tableOfContents.minHeadingLevel,
|
||||
maxHeadingLevel: DEFAULT_CONFIG.tableOfContents.maxHeadingLevel,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('toc with min', () => {
|
||||
const tableOfContents = {
|
||||
minHeadingLevel: 3,
|
||||
};
|
||||
expect(testValidateThemeConfig({tableOfContents})).toEqual({
|
||||
...DEFAULT_CONFIG,
|
||||
tableOfContents: {
|
||||
it('accepts min', () => {
|
||||
const tableOfContents = {
|
||||
minHeadingLevel: 3,
|
||||
maxHeadingLevel: DEFAULT_CONFIG.tableOfContents.maxHeadingLevel,
|
||||
},
|
||||
};
|
||||
expect(testValidateThemeConfig({tableOfContents})).toEqual({
|
||||
...DEFAULT_CONFIG,
|
||||
tableOfContents: {
|
||||
minHeadingLevel: 3,
|
||||
maxHeadingLevel: DEFAULT_CONFIG.tableOfContents.maxHeadingLevel,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('toc with max', () => {
|
||||
const tableOfContents = {
|
||||
maxHeadingLevel: 5,
|
||||
};
|
||||
expect(testValidateThemeConfig({tableOfContents})).toEqual({
|
||||
...DEFAULT_CONFIG,
|
||||
tableOfContents: {
|
||||
minHeadingLevel: DEFAULT_CONFIG.tableOfContents.minHeadingLevel,
|
||||
it('accepts max', () => {
|
||||
const tableOfContents = {
|
||||
maxHeadingLevel: 5,
|
||||
},
|
||||
};
|
||||
expect(testValidateThemeConfig({tableOfContents})).toEqual({
|
||||
...DEFAULT_CONFIG,
|
||||
tableOfContents: {
|
||||
minHeadingLevel: DEFAULT_CONFIG.tableOfContents.minHeadingLevel,
|
||||
maxHeadingLevel: 5,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('toc with min 2.5', () => {
|
||||
const tableOfContents = {
|
||||
minHeadingLevel: 2.5,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.minHeadingLevel\\" must be an integer"`,
|
||||
);
|
||||
});
|
||||
it('rejects min 2.5', () => {
|
||||
const tableOfContents = {
|
||||
minHeadingLevel: 2.5,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.minHeadingLevel\\" must be an integer"`,
|
||||
);
|
||||
});
|
||||
|
||||
test('toc with max 2.5', () => {
|
||||
const tableOfContents = {
|
||||
maxHeadingLevel: 2.5,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.maxHeadingLevel\\" must be an integer"`,
|
||||
);
|
||||
});
|
||||
it('rejects max 2.5', () => {
|
||||
const tableOfContents = {
|
||||
maxHeadingLevel: 2.5,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.maxHeadingLevel\\" must be an integer"`,
|
||||
);
|
||||
});
|
||||
|
||||
test('toc with min 1', () => {
|
||||
const tableOfContents = {
|
||||
minHeadingLevel: 1,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.minHeadingLevel\\" must be greater than or equal to 2"`,
|
||||
);
|
||||
});
|
||||
it('rejects min 1', () => {
|
||||
const tableOfContents = {
|
||||
minHeadingLevel: 1,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.minHeadingLevel\\" must be greater than or equal to 2"`,
|
||||
);
|
||||
});
|
||||
|
||||
test('toc with min 7', () => {
|
||||
const tableOfContents = {
|
||||
minHeadingLevel: 7,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.minHeadingLevel\\" must be less than or equal to ref:maxHeadingLevel"`,
|
||||
);
|
||||
});
|
||||
it('rejects min 7', () => {
|
||||
const tableOfContents = {
|
||||
minHeadingLevel: 7,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.minHeadingLevel\\" must be less than or equal to ref:maxHeadingLevel"`,
|
||||
);
|
||||
});
|
||||
|
||||
test('toc with max 1', () => {
|
||||
const tableOfContents = {
|
||||
maxHeadingLevel: 1,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.maxHeadingLevel\\" must be greater than or equal to 2"`,
|
||||
);
|
||||
});
|
||||
it('rejects max 1', () => {
|
||||
const tableOfContents = {
|
||||
maxHeadingLevel: 1,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.maxHeadingLevel\\" must be greater than or equal to 2"`,
|
||||
);
|
||||
});
|
||||
|
||||
test('toc with max 7', () => {
|
||||
const tableOfContents = {
|
||||
maxHeadingLevel: 7,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.maxHeadingLevel\\" must be less than or equal to 6"`,
|
||||
);
|
||||
});
|
||||
it('rejects max 7', () => {
|
||||
const tableOfContents = {
|
||||
maxHeadingLevel: 7,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.maxHeadingLevel\\" must be less than or equal to 6"`,
|
||||
);
|
||||
});
|
||||
|
||||
test('toc with bad min 5 + max 3', () => {
|
||||
const tableOfContents = {
|
||||
minHeadingLevel: 5,
|
||||
maxHeadingLevel: 3,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.minHeadingLevel\\" must be less than or equal to ref:maxHeadingLevel"`,
|
||||
);
|
||||
it('rejects min 5 + max 3', () => {
|
||||
const tableOfContents = {
|
||||
minHeadingLevel: 5,
|
||||
maxHeadingLevel: 3,
|
||||
};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({tableOfContents}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"\\"tableOfContents.minHeadingLevel\\" must be less than or equal to ref:maxHeadingLevel"`,
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@@ -15,7 +15,7 @@ import {
} from '@docusaurus/theme-common';

describe('Tabs', () => {
  test('Should reject bad Tabs child', () => {
  it('rejects bad Tabs child', () => {
    expect(() => {
      renderer.create(
        <Tabs>
@@ -27,7 +27,7 @@ describe('Tabs', () => {
      `"Docusaurus error: Bad <Tabs> child <div>: all children of the <Tabs> component should be <TabItem>, and every <TabItem> should have a unique \\"value\\" prop."`,
    );
  });
  test('Should reject bad Tabs defaultValue', () => {
  it('rejects bad Tabs defaultValue', () => {
    expect(() => {
      renderer.create(
        <Tabs defaultValue="bad">
@@ -39,7 +39,7 @@ describe('Tabs', () => {
      `"Docusaurus error: The <Tabs> has a defaultValue \\"bad\\" but none of its children has the corresponding value. Available values are: v1, v2. If you intend to show no default tab, use defaultValue={null} instead."`,
    );
  });
  test('Should reject duplicate values', () => {
  it('rejects duplicate values', () => {
    expect(() => {
      renderer.create(
        <Tabs>
@@ -55,7 +55,7 @@ describe('Tabs', () => {
      `"Docusaurus error: Duplicate values \\"v1, v2\\" found in <Tabs>. Every value needs to be unique."`,
    );
  });
  test('Should accept valid Tabs config', () => {
  it('accepts valid Tabs config', () => {
    expect(() => {
      renderer.create(
        <ScrollControllerProvider>
@@ -110,7 +110,7 @@ describe('Tabs', () => {
    }).not.toThrow(); // TODO Better Jest infrastructure to mock the Layout
  });
  // https://github.com/facebook/docusaurus/issues/5729
  test('Should accept dynamic Tabs with number values', () => {
  it('accepts dynamic Tabs with number values', () => {
    expect(() => {
      const tabs = ['Apple', 'Banana', 'Carrot'];
      renderer.create(
@ -0,0 +1,123 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`parseLines does not parse content with metastring 1`] = `
|
||||
Object {
|
||||
"code": "aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`parseLines does not parse content with metastring 2`] = `
|
||||
Object {
|
||||
"code": "// highlight-next-line
|
||||
aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`parseLines does not parse content with metastring 3`] = `
|
||||
Object {
|
||||
"code": "aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`parseLines does not parse content with no language 1`] = `
|
||||
Object {
|
||||
"code": "// highlight-next-line
|
||||
aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`parseLines removes lines correctly 1`] = `
|
||||
Object {
|
||||
"code": "aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`parseLines removes lines correctly 2`] = `
|
||||
Object {
|
||||
"code": "aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`parseLines removes lines correctly 3`] = `
|
||||
Object {
|
||||
"code": "aaaaa
|
||||
bbbbbbb
|
||||
bbbbb",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
2,
|
||||
0,
|
||||
1,
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`parseLines respects language 1`] = `
|
||||
Object {
|
||||
"code": "# highlight-next-line
|
||||
aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`parseLines respects language 2`] = `
|
||||
Object {
|
||||
"code": "/* highlight-next-line */
|
||||
aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`parseLines respects language 3`] = `
|
||||
Object {
|
||||
"code": "// highlight-next-line
|
||||
aaaa
|
||||
/* highlight-next-line */
|
||||
bbbbb
|
||||
ccccc
|
||||
<!-- highlight-next-line -->
|
||||
dddd",
|
||||
"highlightLines": Array [
|
||||
4,
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`parseLines respects language 4`] = `
|
||||
Object {
|
||||
"code": "aaaa
|
||||
bbbbb
|
||||
ccccc
|
||||
dddd",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
|
@ -12,45 +12,45 @@ import {
|
|||
} from '../codeBlockUtils';
|
||||
|
||||
describe('parseCodeBlockTitle', () => {
|
||||
test('should parse double quote delimited title', () => {
|
||||
it('parses double quote delimited title', () => {
|
||||
expect(parseCodeBlockTitle(`title="index.js"`)).toEqual(`index.js`);
|
||||
});
|
||||
|
||||
test('should parse single quote delimited title', () => {
|
||||
it('parses single quote delimited title', () => {
|
||||
expect(parseCodeBlockTitle(`title='index.js'`)).toEqual(`index.js`);
|
||||
});
|
||||
|
||||
test('should not parse mismatched quote delimiters', () => {
|
||||
it('does not parse mismatched quote delimiters', () => {
|
||||
expect(parseCodeBlockTitle(`title="index.js'`)).toEqual(``);
|
||||
});
|
||||
|
||||
test('should parse undefined metastring', () => {
|
||||
it('parses undefined metastring', () => {
|
||||
expect(parseCodeBlockTitle(undefined)).toEqual(``);
|
||||
});
|
||||
|
||||
test('should parse metastring with no title specified', () => {
|
||||
it('parses metastring with no title specified', () => {
|
||||
expect(parseCodeBlockTitle(`{1,2-3}`)).toEqual(``);
|
||||
});
|
||||
|
||||
test('should parse with multiple metadata title first', () => {
|
||||
it('parses with multiple metadata title first', () => {
|
||||
expect(parseCodeBlockTitle(`title="index.js" label="JavaScript"`)).toEqual(
|
||||
`index.js`,
|
||||
);
|
||||
});
|
||||
|
||||
test('should parse with multiple metadata title last', () => {
|
||||
it('parses with multiple metadata title last', () => {
|
||||
expect(parseCodeBlockTitle(`label="JavaScript" title="index.js"`)).toEqual(
|
||||
`index.js`,
|
||||
);
|
||||
});
|
||||
|
||||
test('should parse double quotes when delimited by single quotes', () => {
|
||||
it('parses double quotes when delimited by single quotes', () => {
|
||||
expect(parseCodeBlockTitle(`title='console.log("Hello, World!")'`)).toEqual(
|
||||
`console.log("Hello, World!")`,
|
||||
);
|
||||
});
|
||||
|
||||
test('should parse single quotes when delimited by double quotes', () => {
|
||||
it('parses single quotes when delimited by double quotes', () => {
|
||||
expect(parseCodeBlockTitle(`title="console.log('Hello, World!')"`)).toEqual(
|
||||
`console.log('Hello, World!')`,
|
||||
);
|
||||
|
|
@ -58,7 +58,7 @@ describe('parseCodeBlockTitle', () => {
|
|||
});
|
||||
|
||||
describe('parseLanguage', () => {
|
||||
test('behaves correctly', () => {
|
||||
it('works', () => {
|
||||
expect(parseLanguage('language-foo xxx yyy')).toEqual('foo');
|
||||
expect(parseLanguage('xxxxx language-foo yyy')).toEqual('foo');
|
||||
expect(parseLanguage('xx-language-foo yyyy')).toBeUndefined();
|
||||
|
|
@ -67,16 +67,8 @@ describe('parseLanguage', () => {
|
|||
});
|
||||
|
||||
describe('parseLines', () => {
|
||||
test('does not parse content with metastring', () => {
|
||||
expect(parseLines('aaaaa\nbbbbb', '{1}', 'js')).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
],
|
||||
}
|
||||
`);
|
||||
it('does not parse content with metastring', () => {
|
||||
expect(parseLines('aaaaa\nbbbbb', '{1}', 'js')).toMatchSnapshot();
|
||||
expect(
|
||||
parseLines(
|
||||
`// highlight-next-line
|
||||
|
|
@ -85,33 +77,16 @@ bbbbb`,
|
|||
'{1}',
|
||||
'js',
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "// highlight-next-line
|
||||
aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
expect(
|
||||
parseLines(
|
||||
`aaaaa
|
||||
bbbbb`,
|
||||
'{1}',
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
test('does not parse content with no language', () => {
|
||||
it('does not parse content with no language', () => {
|
||||
expect(
|
||||
parseLines(
|
||||
`// highlight-next-line
|
||||
|
|
@ -120,16 +95,9 @@ bbbbb`,
|
|||
'',
|
||||
undefined,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "// highlight-next-line
|
||||
aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
test('removes lines correctly', () => {
|
||||
it('removes lines correctly', () => {
|
||||
expect(
|
||||
parseLines(
|
||||
`// highlight-next-line
|
||||
|
|
@ -138,15 +106,7 @@ bbbbb`,
|
|||
'',
|
||||
'js',
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
expect(
|
||||
parseLines(
|
||||
`// highlight-start
|
||||
|
|
@ -156,15 +116,7 @@ bbbbb`,
|
|||
'',
|
||||
'js',
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
expect(
|
||||
parseLines(
|
||||
`// highlight-start
|
||||
|
|
@ -177,21 +129,9 @@ bbbbb`,
|
|||
'',
|
||||
'js',
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "aaaaa
|
||||
bbbbbbb
|
||||
bbbbb",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
2,
|
||||
0,
|
||||
1,
|
||||
],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
test('respects language', () => {
|
||||
it('respects language', () => {
|
||||
expect(
|
||||
parseLines(
|
||||
`# highlight-next-line
|
||||
|
|
@ -200,14 +140,7 @@ bbbbb`,
|
|||
'',
|
||||
'js',
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "# highlight-next-line
|
||||
aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
expect(
|
||||
parseLines(
|
||||
`/* highlight-next-line */
|
||||
|
|
@ -216,14 +149,7 @@ bbbbb`,
|
|||
'',
|
||||
'py',
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "/* highlight-next-line */
|
||||
aaaaa
|
||||
bbbbb",
|
||||
"highlightLines": Array [],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
expect(
|
||||
parseLines(
|
||||
`// highlight-next-line
|
||||
|
|
@ -237,20 +163,7 @@ dddd`,
|
|||
'',
|
||||
'py',
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "// highlight-next-line
|
||||
aaaa
|
||||
/* highlight-next-line */
|
||||
bbbbb
|
||||
ccccc
|
||||
<!-- highlight-next-line -->
|
||||
dddd",
|
||||
"highlightLines": Array [
|
||||
4,
|
||||
],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
expect(
|
||||
parseLines(
|
||||
`// highlight-next-line
|
||||
|
|
@ -264,19 +177,6 @@ dddd`,
|
|||
'',
|
||||
'',
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "aaaa
|
||||
bbbbb
|
||||
ccccc
|
||||
dddd",
|
||||
"highlightLines": Array [
|
||||
0,
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
],
|
||||
}
|
||||
`);
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -65,365 +65,353 @@ function testVersion(data?: Partial<PropVersionMetadata>): PropVersionMetadata {
|
|||
};
|
||||
}
|
||||
|
||||
describe('docsUtils', () => {
|
||||
describe('useDocsVersion', () => {
|
||||
test('should throw if context provider is missing', () => {
|
||||
expect(
|
||||
() => renderHook(() => useDocsVersion()).result.current,
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"Hook useDocsVersion is called outside the <DocsVersionProvider>. "`,
|
||||
);
|
||||
});
|
||||
|
||||
test('should read value from context provider', () => {
|
||||
const version = testVersion();
|
||||
const {result} = renderHook(() => useDocsVersion(), {
|
||||
wrapper: ({children}) => (
|
||||
<DocsVersionProvider version={version}>
|
||||
{children}
|
||||
</DocsVersionProvider>
|
||||
),
|
||||
});
|
||||
expect(result.current).toBe(version);
|
||||
});
|
||||
describe('useDocsVersion', () => {
|
||||
it('throws if context provider is missing', () => {
|
||||
expect(
|
||||
() => renderHook(() => useDocsVersion()).result.current,
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"Hook useDocsVersion is called outside the <DocsVersionProvider>. "`,
|
||||
);
|
||||
});
|
||||
|
||||
describe('useDocsSidebar', () => {
|
||||
test('should throw if context provider is missing', () => {
|
||||
expect(
|
||||
() => renderHook(() => useDocsSidebar()).result.current,
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"Hook useDocsSidebar is called outside the <DocsSidebarProvider>. "`,
|
||||
);
|
||||
it('reads value from context provider', () => {
|
||||
const version = testVersion();
|
||||
const {result} = renderHook(() => useDocsVersion(), {
|
||||
wrapper: ({children}) => (
|
||||
<DocsVersionProvider version={version}>{children}</DocsVersionProvider>
|
||||
),
|
||||
});
|
||||
expect(result.current).toBe(version);
|
||||
});
|
||||
});
|
||||
|
||||
test('should read value from context provider', () => {
|
||||
const sidebar: PropSidebar = [];
|
||||
const {result} = renderHook(() => useDocsSidebar(), {
|
||||
wrapper: ({children}) => (
|
||||
<DocsSidebarProvider sidebar={sidebar}>
|
||||
{children}
|
||||
</DocsSidebarProvider>
|
||||
),
|
||||
});
|
||||
expect(result.current).toBe(sidebar);
|
||||
});
|
||||
describe('useDocsSidebar', () => {
|
||||
it('throws if context provider is missing', () => {
|
||||
expect(
|
||||
() => renderHook(() => useDocsSidebar()).result.current,
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"Hook useDocsSidebar is called outside the <DocsSidebarProvider>. "`,
|
||||
);
|
||||
});
|
||||
|
||||
describe('useDocById', () => {
|
||||
const version = testVersion({
|
||||
docs: {
|
||||
doc1: {
|
||||
id: 'doc1',
|
||||
title: 'Doc 1',
|
||||
description: 'desc1',
|
||||
sidebar: 'sidebar1',
|
||||
},
|
||||
doc2: {
|
||||
id: 'doc2',
|
||||
title: 'Doc 2',
|
||||
description: 'desc2',
|
||||
sidebar: 'sidebar2',
|
||||
},
|
||||
it('reads value from context provider', () => {
|
||||
const sidebar: PropSidebar = [];
|
||||
const {result} = renderHook(() => useDocsSidebar(), {
|
||||
wrapper: ({children}) => (
|
||||
<DocsSidebarProvider sidebar={sidebar}>{children}</DocsSidebarProvider>
|
||||
),
|
||||
});
|
||||
expect(result.current).toBe(sidebar);
|
||||
});
|
||||
});
|
||||
|
||||
describe('useDocById', () => {
|
||||
const version = testVersion({
|
||||
docs: {
|
||||
doc1: {
|
||||
id: 'doc1',
|
||||
title: 'Doc 1',
|
||||
description: 'desc1',
|
||||
sidebar: 'sidebar1',
|
||||
},
|
||||
});
|
||||
|
||||
function callHook(docId: string | undefined) {
|
||||
const {result} = renderHook(() => useDocById(docId), {
|
||||
wrapper: ({children}) => (
|
||||
<DocsVersionProvider version={version}>
|
||||
{children}
|
||||
</DocsVersionProvider>
|
||||
),
|
||||
});
|
||||
return result.current;
|
||||
}
|
||||
|
||||
test('should accept undefined', () => {
|
||||
expect(callHook(undefined)).toBeUndefined();
|
||||
});
|
||||
|
||||
test('should find doc1', () => {
|
||||
expect(callHook('doc1')).toMatchObject({id: 'doc1'});
|
||||
});
|
||||
test('should find doc2', () => {
|
||||
expect(callHook('doc2')).toMatchObject({id: 'doc2'});
|
||||
});
|
||||
|
||||
test('should throw for doc3', () => {
|
||||
expect(() => callHook('doc3')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"no version doc found by id=doc3"`,
|
||||
);
|
||||
});
|
||||
doc2: {
|
||||
id: 'doc2',
|
||||
title: 'Doc 2',
|
||||
description: 'desc2',
|
||||
sidebar: 'sidebar2',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
describe('findSidebarCategory', () => {
|
||||
test('should be able to return undefined', () => {
|
||||
expect(findSidebarCategory([], () => false)).toBeUndefined();
|
||||
expect(
|
||||
findSidebarCategory([testCategory(), testCategory()], () => false),
|
||||
).toBeUndefined();
|
||||
function callHook(docId: string | undefined) {
|
||||
const {result} = renderHook(() => useDocById(docId), {
|
||||
wrapper: ({children}) => (
|
||||
<DocsVersionProvider version={version}>{children}</DocsVersionProvider>
|
||||
),
|
||||
});
|
||||
return result.current;
|
||||
}
|
||||
|
||||
test('should return first element matching predicate', () => {
|
||||
const first = testCategory();
|
||||
const second = testCategory();
|
||||
const third = testCategory();
|
||||
const sidebar = [first, second, third];
|
||||
expect(findSidebarCategory(sidebar, () => true)).toEqual(first);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === first)).toEqual(
|
||||
first,
|
||||
);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === second)).toEqual(
|
||||
second,
|
||||
);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === third)).toEqual(
|
||||
third,
|
||||
);
|
||||
});
|
||||
|
||||
test('should be able to search in sub items', () => {
|
||||
const subsub1 = testCategory();
|
||||
const subsub2 = testCategory();
|
||||
const sub1 = testCategory({
|
||||
items: [subsub1, subsub2],
|
||||
});
|
||||
const sub2 = testCategory();
|
||||
const parent = testCategory({
|
||||
items: [sub1, sub2],
|
||||
});
|
||||
const sidebar = [parent];
|
||||
|
||||
expect(findSidebarCategory(sidebar, () => true)).toEqual(parent);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === sub1)).toEqual(
|
||||
sub1,
|
||||
);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === sub2)).toEqual(
|
||||
sub2,
|
||||
);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === subsub1)).toEqual(
|
||||
subsub1,
|
||||
);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === subsub2)).toEqual(
|
||||
subsub2,
|
||||
);
|
||||
});
|
||||
it('accepts undefined', () => {
|
||||
expect(callHook(undefined)).toBeUndefined();
|
||||
});
|
||||
|
||||
describe('findFirstCategoryLink', () => {
|
||||
test('category without link nor child', () => {
|
||||
expect(
|
||||
findFirstCategoryLink(
|
||||
testCategory({
|
||||
href: undefined,
|
||||
}),
|
||||
),
|
||||
).toEqual(undefined);
|
||||
});
|
||||
|
||||
test('category with link', () => {
|
||||
expect(
|
||||
findFirstCategoryLink(
|
||||
testCategory({
|
||||
href: '/itemPath',
|
||||
}),
|
||||
),
|
||||
).toEqual('/itemPath');
|
||||
});
|
||||
|
||||
test('category with deeply nested category link', () => {
|
||||
expect(
|
||||
findFirstCategoryLink(
|
||||
testCategory({
|
||||
href: undefined,
|
||||
items: [
|
||||
{type: 'html', value: '<p>test1</p>'},
|
||||
testCategory({
|
||||
href: undefined,
|
||||
items: [
|
||||
{type: 'html', value: '<p>test2</p>'},
|
||||
testCategory({
|
||||
href: '/itemPath',
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
}),
|
||||
),
|
||||
).toEqual('/itemPath');
|
||||
});
|
||||
|
||||
test('category with deeply nested link', () => {
|
||||
expect(
|
||||
findFirstCategoryLink(
|
||||
testCategory({
|
||||
href: undefined,
|
||||
items: [
|
||||
testCategory({
|
||||
href: undefined,
|
||||
items: [{type: 'link', href: '/itemPath', label: 'Label'}],
|
||||
}),
|
||||
],
|
||||
}),
|
||||
),
|
||||
).toEqual('/itemPath');
|
||||
});
|
||||
it('finds doc1', () => {
|
||||
expect(callHook('doc1')).toMatchObject({id: 'doc1'});
|
||||
});
|
||||
it('finds doc2', () => {
|
||||
expect(callHook('doc2')).toMatchObject({id: 'doc2'});
|
||||
});
|
||||
|
||||
describe('isActiveSidebarItem', () => {
|
||||
test('with link href', () => {
|
||||
const item: PropSidebarItem = {
|
||||
type: 'link',
|
||||
href: '/itemPath',
|
||||
label: 'Label',
|
||||
};
|
||||
it('throws for doc3', () => {
|
||||
expect(() => callHook('doc3')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"no version doc found by id=doc3"`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
expect(isActiveSidebarItem(item, '/unexistingPath')).toEqual(false);
|
||||
|
||||
expect(isActiveSidebarItem(item, '/itemPath')).toEqual(true);
|
||||
|
||||
// Ensure it's not trailing slash sensitive:
|
||||
expect(isActiveSidebarItem(item, '/itemPath/')).toEqual(true);
|
||||
expect(
|
||||
isActiveSidebarItem({...item, href: '/itemPath/'}, '/itemPath'),
|
||||
).toEqual(true);
|
||||
});
|
||||
|
||||
test('with category href', () => {
|
||||
const item: PropSidebarItem = testCategory({
|
||||
href: '/itemPath',
|
||||
});
|
||||
|
||||
expect(isActiveSidebarItem(item, '/unexistingPath')).toEqual(false);
|
||||
|
||||
expect(isActiveSidebarItem(item, '/itemPath')).toEqual(true);
|
||||
|
||||
// Ensure it's not trailing slash sensitive:
|
||||
expect(isActiveSidebarItem(item, '/itemPath/')).toEqual(true);
|
||||
expect(
|
||||
isActiveSidebarItem({...item, href: '/itemPath/'}, '/itemPath'),
|
||||
).toEqual(true);
|
||||
});
|
||||
|
||||
test('with category nested items', () => {
|
||||
const item: PropSidebarItem = testCategory({
|
||||
href: '/category-path',
|
||||
items: [
|
||||
{
|
||||
type: 'link',
|
||||
href: '/sub-link-path',
|
||||
label: 'Label',
|
||||
},
|
||||
testCategory({
|
||||
href: '/sub-category-path',
|
||||
items: [
|
||||
{
|
||||
type: 'link',
|
||||
href: '/sub-sub-link-path',
|
||||
label: 'Label',
|
||||
},
|
||||
],
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
expect(isActiveSidebarItem(item, '/unexistingPath')).toEqual(false);
|
||||
|
||||
expect(isActiveSidebarItem(item, '/category-path')).toEqual(true);
|
||||
expect(isActiveSidebarItem(item, '/sub-link-path')).toEqual(true);
|
||||
expect(isActiveSidebarItem(item, '/sub-category-path')).toEqual(true);
|
||||
expect(isActiveSidebarItem(item, '/sub-sub-link-path')).toEqual(true);
|
||||
|
||||
// Ensure it's not trailing slash sensitive:
|
||||
expect(isActiveSidebarItem(item, '/category-path/')).toEqual(true);
|
||||
expect(isActiveSidebarItem(item, '/sub-link-path/')).toEqual(true);
|
||||
expect(isActiveSidebarItem(item, '/sub-category-path/')).toEqual(true);
|
||||
expect(isActiveSidebarItem(item, '/sub-sub-link-path/')).toEqual(true);
|
||||
});
|
||||
describe('findSidebarCategory', () => {
|
||||
it('is able to return undefined', () => {
|
||||
expect(findSidebarCategory([], () => false)).toBeUndefined();
|
||||
expect(
|
||||
findSidebarCategory([testCategory(), testCategory()], () => false),
|
||||
).toBeUndefined();
|
||||
});
|
||||
|
||||
describe('getBreadcrumbs', () => {
|
||||
test('should return empty for empty sidebar', () => {
|
||||
expect(
|
||||
getBreadcrumbs({
|
||||
sidebar: [],
|
||||
pathname: '/doesNotExist',
|
||||
it('returns first element matching predicate', () => {
|
||||
const first = testCategory();
|
||||
const second = testCategory();
|
||||
const third = testCategory();
|
||||
const sidebar = [first, second, third];
|
||||
expect(findSidebarCategory(sidebar, () => true)).toEqual(first);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === first)).toEqual(
|
||||
first,
|
||||
);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === second)).toEqual(
|
||||
second,
|
||||
);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === third)).toEqual(
|
||||
third,
|
||||
);
|
||||
});
|
||||
|
||||
it('is able to search in sub items', () => {
|
||||
const subsub1 = testCategory();
|
||||
const subsub2 = testCategory();
|
||||
const sub1 = testCategory({
|
||||
items: [subsub1, subsub2],
|
||||
});
|
||||
const sub2 = testCategory();
|
||||
const parent = testCategory({
|
||||
items: [sub1, sub2],
|
||||
});
|
||||
const sidebar = [parent];
|
||||
|
||||
expect(findSidebarCategory(sidebar, () => true)).toEqual(parent);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === sub1)).toEqual(sub1);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === sub2)).toEqual(sub2);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === subsub1)).toEqual(
|
||||
subsub1,
|
||||
);
|
||||
expect(findSidebarCategory(sidebar, (item) => item === subsub2)).toEqual(
|
||||
subsub2,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('findFirstCategoryLink', () => {
|
||||
it('works with category without link nor child', () => {
|
||||
expect(
|
||||
findFirstCategoryLink(
|
||||
testCategory({
|
||||
href: undefined,
|
||||
}),
|
||||
).toEqual([]);
|
||||
),
|
||||
).toEqual(undefined);
|
||||
});
|
||||
|
||||
it('works with category with link', () => {
|
||||
expect(
|
||||
findFirstCategoryLink(
|
||||
testCategory({
|
||||
href: '/itemPath',
|
||||
}),
|
||||
),
|
||||
).toEqual('/itemPath');
|
||||
});
|
||||
|
||||
it('works with category with deeply nested category link', () => {
|
||||
expect(
|
||||
findFirstCategoryLink(
|
||||
testCategory({
|
||||
href: undefined,
|
||||
items: [
|
||||
{type: 'html', value: '<p>test1</p>'},
|
||||
testCategory({
|
||||
href: undefined,
|
||||
items: [
|
||||
{type: 'html', value: '<p>test2</p>'},
|
||||
testCategory({
|
||||
href: '/itemPath',
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
}),
|
||||
),
|
||||
).toEqual('/itemPath');
|
||||
});
|
||||
|
||||
it('works with category with deeply nested link', () => {
|
||||
expect(
|
||||
findFirstCategoryLink(
|
||||
testCategory({
|
||||
href: undefined,
|
||||
items: [
|
||||
testCategory({
|
||||
href: undefined,
|
||||
items: [{type: 'link', href: '/itemPath', label: 'Label'}],
|
||||
}),
|
||||
],
|
||||
}),
|
||||
),
|
||||
).toEqual('/itemPath');
|
||||
});
|
||||
});
|
||||
|
||||
describe('isActiveSidebarItem', () => {
|
||||
it('works with link href', () => {
|
||||
const item: PropSidebarItem = {
|
||||
type: 'link',
|
||||
href: '/itemPath',
|
||||
label: 'Label',
|
||||
};
|
||||
|
||||
expect(isActiveSidebarItem(item, '/unexistingPath')).toEqual(false);
|
||||
|
||||
expect(isActiveSidebarItem(item, '/itemPath')).toEqual(true);
|
||||
|
||||
// Ensure it's not trailing slash sensitive:
|
||||
expect(isActiveSidebarItem(item, '/itemPath/')).toEqual(true);
|
||||
expect(
|
||||
isActiveSidebarItem({...item, href: '/itemPath/'}, '/itemPath'),
|
||||
).toEqual(true);
|
||||
});
|
||||
|
||||
it('works with category href', () => {
|
||||
const item: PropSidebarItem = testCategory({
|
||||
href: '/itemPath',
|
||||
});
|
||||
|
||||
test('should return empty for sidebar but unknown pathname', () => {
|
||||
const sidebar = [testCategory(), testLink()];
|
||||
expect(
|
||||
getBreadcrumbs({
|
||||
sidebar,
|
||||
pathname: '/doesNotExist',
|
||||
expect(isActiveSidebarItem(item, '/unexistingPath')).toEqual(false);
|
||||
|
||||
expect(isActiveSidebarItem(item, '/itemPath')).toEqual(true);
|
||||
|
||||
// Ensure it's not trailing slash sensitive:
|
||||
expect(isActiveSidebarItem(item, '/itemPath/')).toEqual(true);
|
||||
expect(
|
||||
isActiveSidebarItem({...item, href: '/itemPath/'}, '/itemPath'),
|
||||
).toEqual(true);
|
||||
});
|
||||
|
||||
it('works with category nested items', () => {
|
||||
const item: PropSidebarItem = testCategory({
|
||||
href: '/category-path',
|
||||
items: [
|
||||
{
|
||||
type: 'link',
|
||||
href: '/sub-link-path',
|
||||
label: 'Label',
|
||||
},
|
||||
testCategory({
|
||||
href: '/sub-category-path',
|
||||
items: [
|
||||
{
|
||||
type: 'link',
|
||||
href: '/sub-sub-link-path',
|
||||
label: 'Label',
|
||||
},
|
||||
],
|
||||
}),
|
||||
).toEqual([]);
|
||||
],
|
||||
});
|
||||
|
||||
test('should return first level category', () => {
|
||||
const pathname = '/somePathName';
|
||||
const sidebar = [testCategory({href: pathname}), testLink()];
|
||||
expect(isActiveSidebarItem(item, '/unexistingPath')).toEqual(false);
|
||||
|
||||
expect(
|
||||
getBreadcrumbs({
|
||||
sidebar,
|
||||
pathname,
|
||||
}),
|
||||
).toEqual([sidebar[0]]);
|
||||
expect(isActiveSidebarItem(item, '/category-path')).toEqual(true);
|
||||
expect(isActiveSidebarItem(item, '/sub-link-path')).toEqual(true);
|
||||
expect(isActiveSidebarItem(item, '/sub-category-path')).toEqual(true);
|
||||
expect(isActiveSidebarItem(item, '/sub-sub-link-path')).toEqual(true);
|
||||
|
||||
// Ensure it's not trailing slash sensitive:
|
||||
expect(isActiveSidebarItem(item, '/category-path/')).toEqual(true);
|
||||
expect(isActiveSidebarItem(item, '/sub-link-path/')).toEqual(true);
|
||||
expect(isActiveSidebarItem(item, '/sub-category-path/')).toEqual(true);
|
||||
expect(isActiveSidebarItem(item, '/sub-sub-link-path/')).toEqual(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getBreadcrumbs', () => {
|
||||
it('returns empty for empty sidebar', () => {
|
||||
expect(
|
||||
getBreadcrumbs({
|
||||
sidebar: [],
|
||||
pathname: '/doesNotExist',
|
||||
}),
|
||||
).toEqual([]);
|
||||
});
|
||||
|
||||
it('returns empty for sidebar but unknown pathname', () => {
|
||||
const sidebar = [testCategory(), testLink()];
|
||||
expect(
|
||||
getBreadcrumbs({
|
||||
sidebar,
|
||||
pathname: '/doesNotExist',
|
||||
}),
|
||||
).toEqual([]);
|
||||
});
|
||||
|
||||
it('returns first level category', () => {
|
||||
const pathname = '/somePathName';
|
||||
const sidebar = [testCategory({href: pathname}), testLink()];
|
||||
|
||||
expect(
|
||||
getBreadcrumbs({
|
||||
sidebar,
|
||||
pathname,
|
||||
}),
|
||||
).toEqual([sidebar[0]]);
|
||||
});
|
||||
|
||||
it('returns first level link', () => {
|
||||
const pathname = '/somePathName';
|
||||
const sidebar = [testCategory(), testLink({href: pathname})];
|
||||
|
||||
expect(
|
||||
getBreadcrumbs({
|
||||
sidebar,
|
||||
pathname,
|
||||
}),
|
||||
).toEqual([sidebar[1]]);
|
||||
});
|
||||
|
||||
it('returns nested category', () => {
|
||||
const pathname = '/somePathName';
|
||||
|
||||
const categoryLevel3 = testCategory({
|
||||
href: pathname,
|
||||
});
|
||||
|
||||
test('should return first level link', () => {
|
||||
const pathname = '/somePathName';
|
||||
const sidebar = [testCategory(), testLink({href: pathname})];
|
||||
|
||||
expect(
|
||||
getBreadcrumbs({
|
||||
sidebar,
|
||||
pathname,
|
||||
}),
|
||||
).toEqual([sidebar[1]]);
|
||||
});
|
||||
|
||||
test('should return nested category', () => {
|
||||
const pathname = '/somePathName';
|
||||
|
||||
const categoryLevel3 = testCategory({
|
||||
href: pathname,
|
||||
});
|
||||
|
||||
const categoryLevel2 = testCategory({
|
||||
items: [
|
||||
testCategory(),
|
||||
categoryLevel3,
|
||||
testLink({href: pathname}),
|
||||
testLink(),
|
||||
],
|
||||
});
|
||||
|
||||
const categoryLevel1 = testCategory({
|
||||
items: [testLink(), categoryLevel2],
|
||||
});
|
||||
|
||||
const sidebar = [
|
||||
testLink(),
|
||||
const categoryLevel2 = testCategory({
|
||||
items: [
|
||||
testCategory(),
|
||||
categoryLevel1,
|
||||
categoryLevel3,
|
||||
testLink({href: pathname}),
|
||||
testLink(),
|
||||
testCategory(),
|
||||
];
|
||||
|
||||
expect(
|
||||
getBreadcrumbs({
|
||||
sidebar,
|
||||
pathname,
|
||||
}),
|
||||
).toEqual([categoryLevel1, categoryLevel2, categoryLevel3]);
|
||||
],
|
||||
});
|
||||
|
||||
const categoryLevel1 = testCategory({
|
||||
items: [testLink(), categoryLevel2],
|
||||
});
|
||||
|
||||
const sidebar = [
|
||||
testLink(),
|
||||
testCategory(),
|
||||
categoryLevel1,
|
||||
testLink(),
|
||||
testCategory(),
|
||||
];
|
||||
|
||||
expect(
|
||||
getBreadcrumbs({
|
||||
sidebar,
|
||||
pathname,
|
||||
}),
|
||||
).toEqual([categoryLevel1, categoryLevel2, categoryLevel3]);
|
||||
});
|
||||
|
||||
test('should return nested link', () => {
|
||||
it('returns nested link', () => {
|
||||
const pathname = '/somePathName';
|
||||
|
||||
const link = testLink({href: pathname});
|
||||
|
|
|
|||
|
|
@@ -8,13 +8,13 @@
import {uniq, duplicates} from '../jsUtils';

describe('duplicates', () => {
  test('gets duplicate values', () => {
  it('gets duplicate values', () => {
    expect(duplicates(['a', 'b', 'c', 'd'])).toEqual([]);
    expect(duplicates(['a', 'b', 'b', 'b'])).toEqual(['b', 'b']);
    expect(duplicates(['c', 'b', 'b', 'c'])).toEqual(['b', 'c']);
    expect(duplicates([{a: 1}, {a: 1}, {a: 1}])).toEqual([]);
  });
  test('accepts custom comparator', () => {
  it('accepts custom comparator', () => {
    expect(duplicates([{a: 1}, {a: 1}, {a: 1}], (a, b) => a.a === b.a)).toEqual(
      [{a: 1}, {a: 1}],
    );
@@ -28,7 +28,7 @@ describe('duplicates', () => {
});

describe('uniq', () => {
  test('remove duplicate primitives', () => {
  it('remove duplicate primitives', () => {
    expect(uniq(['A', 'B', 'C', 'B', 'A', 'D'])).toEqual(['A', 'B', 'C', 'D']);
    expect(uniq([3, 3, 5, 1, 6, 3, 5])).toEqual([3, 5, 1, 6]);
    expect(uniq([null, undefined, 3, null, 4, 3])).toEqual([
@@ -39,7 +39,7 @@ describe('uniq', () => {
    ]);
  });

  test('remove duplicate objects/arrays by identity', () => {
  it('remove duplicate objects/arrays by identity', () => {
    const obj1 = {};
    const obj2 = {};
    const obj3 = {};
@@ -8,32 +8,32 @@
import {isSamePath} from '../pathUtils';

describe('isSamePath', () => {
  test('should be true for compared path without trailing slash', () => {
  it('returns true for compared path without trailing slash', () => {
    expect(isSamePath('/docs', '/docs')).toBeTruthy();
  });

  test('should be true for compared path with trailing slash', () => {
  it('returns true for compared path with trailing slash', () => {
    expect(isSamePath('/docs', '/docs/')).toBeTruthy();
  });

  test('should be true for compared path with different case', () => {
  it('returns true for compared path with different case', () => {
    expect(isSamePath('/doCS', '/DOcs')).toBeTruthy();
  });

  test('should be true for compared path with different case + trailing slash', () => {
  it('returns true for compared path with different case + trailing slash', () => {
    expect(isSamePath('/doCS', '/DOcs/')).toBeTruthy();
  });

  test('should be false for compared path with double trailing slash', () => {
  it('returns false for compared path with double trailing slash', () => {
    expect(isSamePath('/docs', '/docs//')).toBeFalsy();
  });

  test('should be true for twice undefined/null', () => {
  it('returns true for twice undefined/null', () => {
    expect(isSamePath(undefined, undefined)).toBeTruthy();
    expect(isSamePath(undefined, undefined)).toBeTruthy();
  });

  test('should be false when one undefined', () => {
  it('returns false when one undefined', () => {
    expect(isSamePath('/docs', undefined)).toBeFalsy();
    expect(isSamePath(undefined, '/docs')).toBeFalsy();
  });
@@ -8,7 +8,7 @@
import {isRegexpStringMatch} from '../regexpUtils';

describe('isRegexpStringMatch', () => {
  test('behaves correctly', () => {
  it('works', () => {
    expect(isRegexpStringMatch(undefined, 'foo')).toEqual(false);
    expect(isRegexpStringMatch('bar', undefined)).toEqual(false);
    expect(isRegexpStringMatch('foo', 'bar')).toEqual(false);
@@ -8,17 +8,17 @@
import {type Route} from '@generated/routes';
import {findHomePageRoute} from '../routesUtils';

describe('routesUtils findHomePageRoute', () => {
describe('findHomePageRoute', () => {
  const homePage: Route = {
    path: '/',
    exact: true,
  };

  test('should return undefined for no routes', () => {
  it('returns undefined for no routes', () => {
    expect(findHomePageRoute({baseUrl: '/', routes: []})).toEqual(undefined);
  });

  test('should return undefined for no homepage', () => {
  it('returns undefined for no homepage', () => {
    expect(
      findHomePageRoute({
        baseUrl: '/',
@@ -40,7 +40,7 @@ describe('routesUtils findHomePageRoute', () => {
    ).toEqual(undefined);
  });

  test('should find top-level homepage', () => {
  it('finds top-level homepage', () => {
    expect(
      findHomePageRoute({
        baseUrl: '/',
@@ -56,7 +56,7 @@ describe('routesUtils findHomePageRoute', () => {
    ).toEqual(homePage);
  });

  test('should find nested homepage', () => {
  it('finds nested homepage', () => {
    expect(
      findHomePageRoute({
        baseUrl: '/',
@@ -80,7 +80,7 @@ describe('routesUtils findHomePageRoute', () => {
    ).toEqual(homePage);
  });

  test('should find nested homepage with baseUrl', () => {
  it('finds nested homepage with baseUrl', () => {
    const baseUrl = '/baseUrl/';
    const baseUrlHomePage = {...homePage, path: baseUrl};
    expect(
@@ -8,7 +8,7 @@
import {docVersionSearchTag} from '../searchUtils';

describe('docVersionSearchTag', () => {
  test('behaves correctly', () => {
  it('works', () => {
    expect(docVersionSearchTag('foo', 'bar')).toEqual('docs-foo-bar');
  });
});
@@ -13,7 +13,7 @@ describe('listTagsByLetters', () => {
  type Tag = Param[number];
  type Result = ReturnType<typeof listTagsByLetters>;

  test('Should create letters list', () => {
  it('creates letters list', () => {
    const tag1: Tag = {
      name: 'tag1',
      permalink: '/tag1',
@@ -10,7 +10,7 @@ import {renderHook} from '@testing-library/react-hooks';
import {useFilteredAndTreeifiedTOC} from '../tocUtils';

describe('useFilteredAndTreeifiedTOC', () => {
  test('filter a toc with all heading levels', () => {
  it('filters a toc with all heading levels', () => {
    const toc: TOCItem[] = [
      {
        id: 'alpha',
@@ -137,7 +137,7 @@ describe('useFilteredAndTreeifiedTOC', () => {
  // It's not 100% clear exactly how the TOC should behave under weird heading
  // levels provided by the user. Adding a test so that behavior stays the same
  // over time
  test('filter invalid heading levels (but possible) TOC', () => {
  it('filters invalid heading levels (but possible) TOC', () => {
    const toc: TOCItem[] = [
      {
        id: 'charlie',
@@ -22,7 +22,7 @@ function testValidateThemeConfig(themeConfig) {
}

describe('validateThemeConfig', () => {
  test('undefined config', () => {
  it('undefined config', () => {
    const liveCodeBlock = undefined;
    expect(testValidateThemeConfig({liveCodeBlock})).toEqual({
      liveCodeBlock: {
@@ -31,7 +31,7 @@ describe('validateThemeConfig', () => {
    });
  });

  test('unexist config', () => {
  it('unexist config', () => {
    expect(testValidateThemeConfig({})).toEqual({
      liveCodeBlock: {
        ...DEFAULT_CONFIG,
@@ -39,7 +39,7 @@ describe('validateThemeConfig', () => {
    });
  });

  test('empty config', () => {
  it('empty config', () => {
    const liveCodeBlock = {};
    expect(testValidateThemeConfig({liveCodeBlock})).toEqual({
      liveCodeBlock: {
@@ -48,7 +48,7 @@ describe('validateThemeConfig', () => {
    });
  });

  test('playgroundPosition top', () => {
  it('playgroundPosition top', () => {
    const liveCodeBlock = {
      playgroundPosition: 'top',
    };
@@ -60,7 +60,7 @@ describe('validateThemeConfig', () => {
    });
  });

  test('playgroundPosition bottom', () => {
  it('playgroundPosition bottom', () => {
    const liveCodeBlock = {
      playgroundPosition: 'bottom',
    };
@@ -72,7 +72,7 @@ describe('validateThemeConfig', () => {
    });
  });

  test('playgroundPosition invalid string', () => {
  it('playgroundPosition invalid string', () => {
    const liveCodeBlock = {playgroundPosition: 'invalid'};
    expect(() =>
      testValidateThemeConfig({liveCodeBlock}),
@@ -80,7 +80,7 @@ describe('validateThemeConfig', () => {
      `"\\"liveCodeBlock.playgroundPosition\\" must be one of [top, bottom]"`,
    );
  });
  test('playgroundPosition invalid boolean', () => {
  it('playgroundPosition invalid boolean', () => {
    const liveCodeBlock = {playgroundPosition: true};
    expect(() =>
      testValidateThemeConfig({liveCodeBlock}),
@ -23,7 +23,7 @@ function testValidateThemeConfig(themeConfig: Record<string, unknown>) {
|
|||
}
|
||||
|
||||
describe('validateThemeConfig', () => {
|
||||
test('minimal config', () => {
|
||||
it('minimal config', () => {
|
||||
const algolia = {
|
||||
indexName: 'index',
|
||||
apiKey: 'apiKey',
|
||||
|
|
@ -37,7 +37,7 @@ describe('validateThemeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('unknown attributes', () => {
|
||||
it('unknown attributes', () => {
|
||||
const algolia = {
|
||||
indexName: 'index',
|
||||
apiKey: 'apiKey',
|
||||
|
|
@ -52,7 +52,7 @@ describe('validateThemeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('undefined config', () => {
|
||||
it('undefined config', () => {
|
||||
const algolia = undefined;
|
||||
expect(() =>
|
||||
testValidateThemeConfig({algolia}),
|
||||
|
|
@ -61,7 +61,7 @@ describe('validateThemeConfig', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('undefined config 2', () => {
|
||||
it('undefined config 2', () => {
|
||||
expect(() =>
|
||||
testValidateThemeConfig({}),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
|
|
@ -69,7 +69,7 @@ describe('validateThemeConfig', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('missing indexName config', () => {
|
||||
it('missing indexName config', () => {
|
||||
const algolia = {apiKey: 'apiKey', appId: 'BH4D9OD16A'};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({algolia}),
|
||||
|
|
@ -78,14 +78,14 @@ describe('validateThemeConfig', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('missing apiKey config', () => {
|
||||
it('missing apiKey config', () => {
|
||||
const algolia = {indexName: 'indexName', appId: 'BH4D9OD16A'};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({algolia}),
|
||||
).toThrowErrorMatchingInlineSnapshot(`"\\"algolia.apiKey\\" is required"`);
|
||||
});
|
||||
|
||||
test('missing appId config', () => {
|
||||
it('missing appId config', () => {
|
||||
const algolia = {indexName: 'indexName', apiKey: 'apiKey'};
|
||||
expect(() =>
|
||||
testValidateThemeConfig({algolia}),
|
||||
|
|
@ -94,7 +94,7 @@ describe('validateThemeConfig', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('contextualSearch config', () => {
|
||||
it('contextualSearch config', () => {
|
||||
const algolia = {
|
||||
appId: 'BH4D9OD16A',
|
||||
indexName: 'index',
|
||||
|
|
@ -109,7 +109,7 @@ describe('validateThemeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('externalUrlRegex config', () => {
|
||||
it('externalUrlRegex config', () => {
|
||||
const algolia = {
|
||||
appId: 'BH4D9OD16A',
|
||||
indexName: 'index',
|
||||
|
|
@ -124,7 +124,7 @@ describe('validateThemeConfig', () => {
|
|||
});
|
||||
});
|
||||
|
||||
test('searchParameters.facetFilters search config', () => {
|
||||
it('searchParameters.facetFilters search config', () => {
|
||||
const algolia = {
|
||||
appId: 'BH4D9OD16A',
|
||||
indexName: 'index',
|
||||
|
|
|
|||
|
|
@ -14,29 +14,27 @@ import _ from 'lodash';
|
|||
// Seems the 5s default timeout fails sometimes
|
||||
jest.setTimeout(15000);
|
||||
|
||||
describe('theme-translations package', () => {
|
||||
test(`to have base messages files contain EXACTLY all the translations extracted from the theme. Please run "yarn workspace @docusaurus/theme-translations update" to keep base messages files up-to-date.`, async () => {
|
||||
describe('theme translations', () => {
|
||||
it('has base messages files contain EXACTLY all the translations extracted from the theme. Please run "yarn workspace @docusaurus/theme-translations update" to keep base messages files up-to-date', async () => {
|
||||
const baseMessagesDirPath = path.join(__dirname, '../locales/base');
|
||||
const baseMessages = Object.fromEntries(
|
||||
(
|
||||
await Promise.all(
|
||||
(
|
||||
await fs.readdir(baseMessagesDirPath)
|
||||
).map(async (baseMessagesFile) =>
|
||||
Object.entries(
|
||||
JSON.parse(
|
||||
(
|
||||
await fs.readFile(
|
||||
path.join(baseMessagesDirPath, baseMessagesFile),
|
||||
)
|
||||
).toString(),
|
||||
) as Record<string, string>,
|
||||
),
|
||||
await Promise.all(
|
||||
(
|
||||
await fs.readdir(baseMessagesDirPath)
|
||||
).map(async (baseMessagesFile) =>
|
||||
Object.entries(
|
||||
JSON.parse(
|
||||
(
|
||||
await fs.readFile(
|
||||
path.join(baseMessagesDirPath, baseMessagesFile),
|
||||
)
|
||||
).toString(),
|
||||
) as Record<string, string>,
|
||||
),
|
||||
)
|
||||
)
|
||||
.flat()
|
||||
.filter(([key]) => !key.endsWith('___DESCRIPTION')),
|
||||
),
|
||||
).then((translations) =>
|
||||
translations.flat().filter(([key]) => !key.endsWith('___DESCRIPTION')),
|
||||
),
|
||||
);
|
||||
const codeMessages = _.mapValues(
|
||||
await extractThemeCodeMessages(),
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ import {
|
|||
} from '../index';
|
||||
|
||||
describe('codeTranslationLocalesToTry', () => {
|
||||
test('should return appropriate locale lists', () => {
|
||||
it('returns appropriate locale lists', () => {
|
||||
expect(codeTranslationLocalesToTry('fr')).toEqual([
|
||||
'fr',
|
||||
'fr-FR',
|
||||
|
|
@ -56,7 +56,7 @@ describe('readDefaultCodeTranslationMessages', () => {
|
|||
);
|
||||
}
|
||||
|
||||
test('for empty locale', async () => {
|
||||
it('for empty locale', async () => {
|
||||
await expect(
|
||||
readDefaultCodeTranslationMessages({
|
||||
locale: '',
|
||||
|
|
@ -67,7 +67,7 @@ describe('readDefaultCodeTranslationMessages', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('for unexisting locale', async () => {
|
||||
it('for unexisting locale', async () => {
|
||||
await expect(
|
||||
readDefaultCodeTranslationMessages({
|
||||
locale: 'es',
|
||||
|
|
@ -77,7 +77,7 @@ describe('readDefaultCodeTranslationMessages', () => {
|
|||
).resolves.toEqual({});
|
||||
});
|
||||
|
||||
test('for fr but bad folder', async () => {
|
||||
it('for fr but bad folder', async () => {
|
||||
await expect(
|
||||
readDefaultCodeTranslationMessages({
|
||||
locale: 'fr',
|
||||
|
|
@ -87,7 +87,7 @@ describe('readDefaultCodeTranslationMessages', () => {
|
|||
).resolves.toEqual({});
|
||||
});
|
||||
|
||||
test('for fr', async () => {
|
||||
it('for fr', async () => {
|
||||
await expect(
|
||||
readDefaultCodeTranslationMessages({
|
||||
locale: 'fr',
|
||||
|
|
@ -97,7 +97,7 @@ describe('readDefaultCodeTranslationMessages', () => {
|
|||
).resolves.toEqual(await readAsJSON('fr'));
|
||||
});
|
||||
|
||||
test('for fr-FR', async () => {
|
||||
it('for fr-FR', async () => {
|
||||
await expect(
|
||||
readDefaultCodeTranslationMessages({
|
||||
locale: 'fr-FR',
|
||||
|
|
@ -107,7 +107,7 @@ describe('readDefaultCodeTranslationMessages', () => {
|
|||
).resolves.toEqual(await readAsJSON('fr-FR'));
|
||||
});
|
||||
|
||||
test('for en', async () => {
|
||||
it('for en', async () => {
|
||||
await expect(
|
||||
readDefaultCodeTranslationMessages({
|
||||
locale: 'en',
|
||||
|
|
@ -117,7 +117,7 @@ describe('readDefaultCodeTranslationMessages', () => {
|
|||
).resolves.toEqual(await readAsJSON('en'));
|
||||
});
|
||||
|
||||
test('for en-US', async () => {
|
||||
it('for en-US', async () => {
|
||||
await expect(
|
||||
readDefaultCodeTranslationMessages({
|
||||
locale: 'en-US',
|
||||
|
|
@ -127,7 +127,7 @@ describe('readDefaultCodeTranslationMessages', () => {
|
|||
).resolves.toEqual(await readAsJSON('en'));
|
||||
});
|
||||
|
||||
test('for en-WHATEVER', async () => {
|
||||
it('for en-WHATEVER', async () => {
|
||||
await expect(
|
||||
readDefaultCodeTranslationMessages({
|
||||
locale: 'en-WHATEVER',
|
||||
|
|
@ -137,7 +137,7 @@ describe('readDefaultCodeTranslationMessages', () => {
|
|||
).resolves.toEqual(await readAsJSON('en'));
|
||||
});
|
||||
|
||||
test('default locale', async () => {
|
||||
it('default locale', async () => {
|
||||
await expect(
|
||||
readDefaultCodeTranslationMessages({
|
||||
locale: 'zh',
|
||||
|
|
|
|||
|
|
@@ -17,13 +17,13 @@ function params(
}

describe('applyTrailingSlash', () => {
  test('should apply to empty', () => {
  it('applies to empty', () => {
    expect(applyTrailingSlash('', params(true))).toEqual('/');
    expect(applyTrailingSlash('', params(false))).toEqual('');
    expect(applyTrailingSlash('', params(undefined))).toEqual('');
  });

  test('should not apply to /', () => {
  it('does not apply to /', () => {
    expect(applyTrailingSlash('/', params(true))).toEqual('/');
    expect(applyTrailingSlash('/', params(false))).toEqual('/');
    expect(applyTrailingSlash('/', params(undefined))).toEqual('/');
@@ -39,7 +39,7 @@ describe('applyTrailingSlash', () => {
    );
  });

  test('should not apply to /baseUrl/', () => {
  it('does not apply to /baseUrl/', () => {
    const baseUrl = '/baseUrl/';
    expect(applyTrailingSlash('/baseUrl/', params(true, baseUrl))).toEqual(
      '/baseUrl/',
@@ -62,7 +62,7 @@ describe('applyTrailingSlash', () => {
    ).toEqual('/baseUrl/?query#anchor');
  });

  test('should not apply to #anchor links', () => {
  it('does not apply to #anchor links', () => {
    expect(applyTrailingSlash('#', params(true))).toEqual('#');
    expect(applyTrailingSlash('#', params(false))).toEqual('#');
    expect(applyTrailingSlash('#', params(undefined))).toEqual('#');
@@ -71,7 +71,7 @@ describe('applyTrailingSlash', () => {
    expect(applyTrailingSlash('#anchor', params(undefined))).toEqual('#anchor');
  });

  test('should apply to simple paths', () => {
  it('applies to simple paths', () => {
    expect(applyTrailingSlash('abc', params(true))).toEqual('abc/');
    expect(applyTrailingSlash('abc', params(false))).toEqual('abc');
    expect(applyTrailingSlash('abc', params(undefined))).toEqual('abc');
@@ -86,7 +86,7 @@ describe('applyTrailingSlash', () => {
    expect(applyTrailingSlash('/abc/', params(undefined))).toEqual('/abc/');
  });

  test('should apply to path with #anchor', () => {
  it('applies to path with #anchor', () => {
    expect(applyTrailingSlash('/abc#anchor', params(true))).toEqual(
      '/abc/#anchor',
    );
@@ -107,7 +107,7 @@ describe('applyTrailingSlash', () => {
    );
  });

  test('should apply to path with ?search', () => {
  it('applies to path with ?search', () => {
    expect(applyTrailingSlash('/abc?search', params(true))).toEqual(
      '/abc/?search',
    );
@@ -128,7 +128,7 @@ describe('applyTrailingSlash', () => {
    );
  });

  test('should apply to path with ?search#anchor', () => {
  it('applies to path with ?search#anchor', () => {
    expect(applyTrailingSlash('/abc?search#anchor', params(true))).toEqual(
      '/abc/?search#anchor',
    );
@@ -149,7 +149,7 @@ describe('applyTrailingSlash', () => {
    ).toEqual('/abc/?search#anchor');
  });

  test('should apply to fully qualified urls', () => {
  it('applies to fully qualified urls', () => {
    expect(
      applyTrailingSlash('https://xyz.com/abc?search#anchor', params(true)),
    ).toEqual('https://xyz.com/abc/?search#anchor');

@@ -1,67 +1,67 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`validation schemas AdmonitionsSchema: for value=[] 1`] = `"\\"value\\" must be of type object"`;
exports[`validation schemas admonitionsSchema: for value=[] 1`] = `"\\"value\\" must be of type object"`;

exports[`validation schemas AdmonitionsSchema: for value=3 1`] = `"\\"value\\" must be of type object"`;
exports[`validation schemas admonitionsSchema: for value=3 1`] = `"\\"value\\" must be of type object"`;

exports[`validation schemas AdmonitionsSchema: for value=null 1`] = `"\\"value\\" must be of type object"`;
exports[`validation schemas admonitionsSchema: for value=null 1`] = `"\\"value\\" must be of type object"`;

exports[`validation schemas AdmonitionsSchema: for value=true 1`] = `"\\"value\\" must be of type object"`;
exports[`validation schemas admonitionsSchema: for value=true 1`] = `"\\"value\\" must be of type object"`;

exports[`validation schemas PathnameSchema: for value="foo" 1`] = `"\\"value\\" is not a valid pathname. Pathname should start with slash and not contain any domain or query string."`;
exports[`validation schemas pathnameSchema: for value="foo" 1`] = `"\\"value\\" is not a valid pathname. Pathname should start with slash and not contain any domain or query string."`;

exports[`validation schemas PathnameSchema: for value="https://github.com/foo" 1`] = `"\\"value\\" is not a valid pathname. Pathname should start with slash and not contain any domain or query string."`;
exports[`validation schemas pathnameSchema: for value="https://github.com/foo" 1`] = `"\\"value\\" is not a valid pathname. Pathname should start with slash and not contain any domain or query string."`;

exports[`validation schemas PluginIdSchema: for value="/docs" 1`] = `"Illegal plugin ID value \\"/docs\\": it should only contain alphanumerics, underscores, and dashes."`;
exports[`validation schemas pluginIdSchema: for value="/docs" 1`] = `"Illegal plugin ID value \\"/docs\\": it should only contain alphanumerics, underscores, and dashes."`;

exports[`validation schemas PluginIdSchema: for value="do cs" 1`] = `"Illegal plugin ID value \\"do cs\\": it should only contain alphanumerics, underscores, and dashes."`;
exports[`validation schemas pluginIdSchema: for value="do cs" 1`] = `"Illegal plugin ID value \\"do cs\\": it should only contain alphanumerics, underscores, and dashes."`;

exports[`validation schemas PluginIdSchema: for value="do/cs" 1`] = `"Illegal plugin ID value \\"do/cs\\": it should only contain alphanumerics, underscores, and dashes."`;
exports[`validation schemas pluginIdSchema: for value="do/cs" 1`] = `"Illegal plugin ID value \\"do/cs\\": it should only contain alphanumerics, underscores, and dashes."`;

exports[`validation schemas PluginIdSchema: for value="docs/" 1`] = `"Illegal plugin ID value \\"docs/\\": it should only contain alphanumerics, underscores, and dashes."`;
exports[`validation schemas pluginIdSchema: for value="docs/" 1`] = `"Illegal plugin ID value \\"docs/\\": it should only contain alphanumerics, underscores, and dashes."`;

exports[`validation schemas PluginIdSchema: for value=[] 1`] = `"\\"value\\" must be a string"`;
exports[`validation schemas pluginIdSchema: for value=[] 1`] = `"\\"value\\" must be a string"`;

exports[`validation schemas PluginIdSchema: for value=3 1`] = `"\\"value\\" must be a string"`;
exports[`validation schemas pluginIdSchema: for value=3 1`] = `"\\"value\\" must be a string"`;

exports[`validation schemas PluginIdSchema: for value=null 1`] = `"\\"value\\" must be a string"`;
exports[`validation schemas pluginIdSchema: for value=null 1`] = `"\\"value\\" must be a string"`;

exports[`validation schemas PluginIdSchema: for value=true 1`] = `"\\"value\\" must be a string"`;
exports[`validation schemas pluginIdSchema: for value=true 1`] = `"\\"value\\" must be a string"`;

exports[`validation schemas RehypePluginsSchema: for value=[[]] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;
exports[`validation schemas rehypePluginsSchema: for value=[[]] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;

exports[`validation schemas RehypePluginsSchema: for value=[[null,null]] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;
exports[`validation schemas rehypePluginsSchema: for value=[[null,null]] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;

exports[`validation schemas RehypePluginsSchema: for value=[[null,true]] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;
exports[`validation schemas rehypePluginsSchema: for value=[[null,true]] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;

exports[`validation schemas RehypePluginsSchema: for value=[3] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;
exports[`validation schemas rehypePluginsSchema: for value=[3] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;

exports[`validation schemas RehypePluginsSchema: for value=[false] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;
exports[`validation schemas rehypePluginsSchema: for value=[false] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;

exports[`validation schemas RehypePluginsSchema: for value=[null] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;
exports[`validation schemas rehypePluginsSchema: for value=[null] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;

exports[`validation schemas RehypePluginsSchema: for value=3 1`] = `"\\"value\\" must be an array"`;
exports[`validation schemas rehypePluginsSchema: for value=3 1`] = `"\\"value\\" must be an array"`;

exports[`validation schemas RehypePluginsSchema: for value=false 1`] = `"\\"value\\" must be an array"`;
exports[`validation schemas rehypePluginsSchema: for value=false 1`] = `"\\"value\\" must be an array"`;

exports[`validation schemas RehypePluginsSchema: for value=null 1`] = `"\\"value\\" must be an array"`;
exports[`validation schemas rehypePluginsSchema: for value=null 1`] = `"\\"value\\" must be an array"`;

exports[`validation schemas RemarkPluginsSchema: for value=[[]] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;
exports[`validation schemas remarkPluginsSchema: for value=[[]] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;

exports[`validation schemas RemarkPluginsSchema: for value=[[null,null]] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;
exports[`validation schemas remarkPluginsSchema: for value=[[null,null]] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;

exports[`validation schemas RemarkPluginsSchema: for value=[[null,true]] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;
exports[`validation schemas remarkPluginsSchema: for value=[[null,true]] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;

exports[`validation schemas RemarkPluginsSchema: for value=[3] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;
exports[`validation schemas remarkPluginsSchema: for value=[3] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;

exports[`validation schemas RemarkPluginsSchema: for value=[false] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;
exports[`validation schemas remarkPluginsSchema: for value=[false] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;

exports[`validation schemas RemarkPluginsSchema: for value=[null] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;
exports[`validation schemas remarkPluginsSchema: for value=[null] 1`] = `"\\"[0]\\" does not match any of the allowed types"`;

exports[`validation schemas RemarkPluginsSchema: for value=3 1`] = `"\\"value\\" must be an array"`;
exports[`validation schemas remarkPluginsSchema: for value=3 1`] = `"\\"value\\" must be an array"`;

exports[`validation schemas RemarkPluginsSchema: for value=false 1`] = `"\\"value\\" must be an array"`;
exports[`validation schemas remarkPluginsSchema: for value=false 1`] = `"\\"value\\" must be an array"`;

exports[`validation schemas RemarkPluginsSchema: for value=null 1`] = `"\\"value\\" must be an array"`;
exports[`validation schemas remarkPluginsSchema: for value=null 1`] = `"\\"value\\" must be an array"`;

exports[`validation schemas URISchema: for value="spaces are invalid in a URL" 1`] = `"\\"value\\" does not look like a valid url (value='')"`;
exports[`validation schemas uRISchema: for value="spaces are invalid in a URL" 1`] = `"\\"value\\" does not look like a valid url (value='')"`;

@@ -59,7 +59,7 @@ function testMarkdownPluginSchemas(schema: Joi.Schema) {
}

describe('validation schemas', () => {
test('PluginIdSchema', () => {
it('pluginIdSchema', () => {
const {testOK, testFail} = createTestHelpers({
schema: PluginIdSchema,
defaultValue: 'default',

@@ -81,7 +81,7 @@ describe('validation schemas', () => {
testFail([]);
});

test('AdmonitionsSchema', () => {
it('admonitionsSchema', () => {
const {testOK, testFail} = createTestHelpers({
schema: AdmonitionsSchema,
defaultValue: {},

@@ -97,15 +97,15 @@ describe('validation schemas', () => {
testFail([]);
});

test('RemarkPluginsSchema', () => {
it('remarkPluginsSchema', () => {
testMarkdownPluginSchemas(RemarkPluginsSchema);
});

test('RehypePluginsSchema', () => {
it('rehypePluginsSchema', () => {
testMarkdownPluginSchemas(RehypePluginsSchema);
});

test('URISchema', () => {
it('uRISchema', () => {
const {testFail, testOK} = createTestHelpers({schema: URISchema});

const validURL = 'https://docusaurus.io';

@@ -127,7 +127,7 @@ describe('validation schemas', () => {
testOK(protocolRelativeUrl2);
});

test('PathnameSchema', () => {
it('pathnameSchema', () => {
const {testFail, testOK} = createTestHelpers({schema: PathnameSchema});

testOK('/foo');

@@ -11,7 +11,7 @@ import {JoiFrontMatter} from '../JoiFrontMatter';
import {validateFrontMatter} from '../validationUtils';

describe('validateFrontMatter', () => {
test('should accept good values', () => {
it('accepts good values', () => {
const schema = Joi.object<{test: string}>({
test: Joi.string(),
});

@@ -21,7 +21,7 @@ describe('validateFrontMatter', () => {
expect(validateFrontMatter(frontMatter, schema)).toEqual(frontMatter);
});

test('should reject bad values', () => {
it('rejects bad values', () => {
const consoleError = jest
.spyOn(console, 'error')
.mockImplementation(() => {});

@@ -39,7 +39,7 @@ describe('validateFrontMatter', () => {
);
});

test('should not convert simple values', () => {
it('does not convert simple values', () => {
const schema = Joi.object({
test: JoiFrontMatter.string(),
});

@@ -53,7 +53,7 @@ describe('validateFrontMatter', () => {
// Fix Yaml trying to convert strings to numbers automatically
// We only want to deal with a single type in the final front matter
// (not string | number)
test('should convert number values to string when string schema', () => {
it('converts number values to string when string schema', () => {
const schema = Joi.object<{test: string}>({
test: JoiFrontMatter.string(),
});

@@ -66,7 +66,7 @@ describe('validateFrontMatter', () => {
// Helps to fix Yaml trying to convert strings to dates automatically
// We only want to deal with a single type in the final front matter
// (not string | Date)
test('should convert date values when string schema', () => {
it('converts date values when string schema', () => {
const schema = Joi.object<{test: string}>({
test: JoiFrontMatter.string(),
});

@@ -0,0 +1,136 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`replaceMarkdownLinks does basic replace 1`] = `
Object {
"brokenMarkdownLinks": Array [
Object {
"contentPaths": Object {
"contentPath": "docs",
"contentPathLocalized": "i18n/docs-localized",
},
"filePath": "docs/intro.md",
"link": "hmmm.md",
},
],
"newContent": "
[foo](/doc/foo)
[baz](/doc/baz)
[foo](/doc/foo)
[http](http://github.com/facebook/docusaurus/README.md)
[https](https://github.com/facebook/docusaurus/README.md)
[asset](./foo.js)
[asset as well](@site/docs/_partial.md)
[looks like http...](/doc/http)
[nonexistent](hmmm.md)
",
}
`;

exports[`replaceMarkdownLinks ignores links in HTML comments 1`] = `
Object {
"brokenMarkdownLinks": Array [
Object {
"contentPaths": Object {
"contentPath": "docs",
"contentPathLocalized": "i18n/docs-localized",
},
"filePath": "docs/intro.md",
"link": "./foo.md",
},
Object {
"contentPaths": Object {
"contentPath": "docs",
"contentPathLocalized": "i18n/docs-localized",
},
"filePath": "docs/intro.md",
"link": "./foo.md",
},
],
"newContent": "
<!-- [foo](./foo.md) -->
<!--
[foo](./foo.md)
-->
",
}
`;

exports[`replaceMarkdownLinks ignores links in fenced blocks 1`] = `
Object {
"brokenMarkdownLinks": Array [],
"newContent": "
\`\`\`
[foo](foo.md)
\`\`\`

\`\`\`\`js
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`\`

\`\`\`\`js
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`\`
",
}
`;

exports[`replaceMarkdownLinks ignores links in inline code 1`] = `
Object {
"brokenMarkdownLinks": Array [
Object {
"contentPaths": Object {
"contentPath": "docs",
"contentPathLocalized": "i18n/docs-localized",
},
"filePath": "docs/intro.md",
"link": "foo.md",
},
],
"newContent": "
\`[foo](foo.md)\`
",
}
`;

exports[`replaceMarkdownLinks replaces links with same title as URL 1`] = `
Object {
"brokenMarkdownLinks": Array [],
"newContent": "
[/docs/foo](foo.md)
[/docs/foo](./foo.md)
[foo.md](/docs/foo)
[.//docs/foo](foo.md)
",
}
`;

exports[`replaceMarkdownLinks replaces multiple links on same line 1`] = `
Object {
"brokenMarkdownLinks": Array [],
"newContent": "
[a](/docs/a), [a](/docs/a), [b](/docs/b), [c](/docs/c)
",
}
`;

exports[`replaceMarkdownLinks replaces reference style Markdown links 1`] = `
Object {
"brokenMarkdownLinks": Array [],
"newContent": "
The following operations are defined for [URI]s:

* [info]: Returns metadata about the resource,
* [list]: Returns metadata about the resource's children (like getting the content of a local directory).

[URI]: /docs/api/classes/uri
[info]: /docs/api/classes/uri#info
[list]: /docs/api/classes/uri#list
",
}
`;

@@ -0,0 +1,200 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`parseMarkdownString deletes only first heading 1`] = `
Object {
"content": "# Markdown Title

test test test # test bar

# Markdown Title 2

### Markdown Title h3",
"contentTitle": "Markdown Title",
"excerpt": "test test test # test bar",
"frontMatter": Object {},
}
`;

exports[`parseMarkdownString deletes only first heading 2 1`] = `
Object {
"content": "# test

test test test test test test
test test test # test bar
# test2
### test
test3",
"contentTitle": "test",
"excerpt": "test test test test test test",
"frontMatter": Object {},
}
`;

exports[`parseMarkdownString does not warn for duplicate title if markdown title is not at the top 1`] = `
Object {
"content": "foo

# Markdown Title",
"contentTitle": undefined,
"excerpt": "foo",
"frontMatter": Object {
"title": "Frontmatter title",
},
}
`;

exports[`parseMarkdownString handles code blocks 1`] = `
Object {
"content": "\`\`\`js
code
\`\`\`

Content",
"contentTitle": undefined,
"excerpt": "Content",
"frontMatter": Object {},
}
`;

exports[`parseMarkdownString handles code blocks 2`] = `
Object {
"content": "\`\`\`\`js
Foo
\`\`\`diff
code
\`\`\`
Bar
\`\`\`\`

Content",
"contentTitle": undefined,
"excerpt": "Content",
"frontMatter": Object {},
}
`;

exports[`parseMarkdownString handles code blocks 3`] = `
Object {
"content": "\`\`\`\`js
Foo
\`\`\`diff
code
\`\`\`\`

Content",
"contentTitle": undefined,
"excerpt": "Content",
"frontMatter": Object {},
}
`;

exports[`parseMarkdownString ignores markdown title if its not a first text 1`] = `
Object {
"content": "foo
# test",
"contentTitle": undefined,
"excerpt": "foo",
"frontMatter": Object {},
}
`;

exports[`parseMarkdownString parse markdown with front matter 1`] = `
Object {
"content": "Some text",
"contentTitle": undefined,
"excerpt": "Some text",
"frontMatter": Object {
"title": "Frontmatter title",
},
}
`;

exports[`parseMarkdownString parses first heading as contentTitle 1`] = `
Object {
"content": "# Markdown Title

Some text",
"contentTitle": "Markdown Title",
"excerpt": "Some text",
"frontMatter": Object {},
}
`;

exports[`parseMarkdownString parses front-matter and ignore h2 1`] = `
Object {
"content": "## test",
"contentTitle": undefined,
"excerpt": "test",
"frontMatter": Object {
"title": "Frontmatter title",
},
}
`;

exports[`parseMarkdownString parses title only 1`] = `
Object {
"content": "# test",
"contentTitle": "test",
"excerpt": undefined,
"frontMatter": Object {},
}
`;

exports[`parseMarkdownString parses title only alternate 1`] = `
Object {
"content": "test
===",
"contentTitle": "test",
"excerpt": undefined,
"frontMatter": Object {},
}
`;

exports[`parseMarkdownString reads front matter only 1`] = `
Object {
"content": "",
"contentTitle": undefined,
"excerpt": undefined,
"frontMatter": Object {
"title": "test",
},
}
`;

exports[`parseMarkdownString warns about duplicate titles (front matter + markdown alternate) 1`] = `
Object {
"content": "Markdown Title alternate
================

Some text",
"contentTitle": "Markdown Title alternate",
"excerpt": "Some text",
"frontMatter": Object {
"title": "Frontmatter title",
},
}
`;

exports[`parseMarkdownString warns about duplicate titles (front matter + markdown) 1`] = `
Object {
"content": "# Markdown Title

Some text",
"contentTitle": "Markdown Title",
"excerpt": "Some text",
"frontMatter": Object {
"title": "Frontmatter title",
},
}
`;

exports[`parseMarkdownString warns about duplicate titles 1`] = `
Object {
"content": "# test",
"contentTitle": "test",
"excerpt": undefined,
"frontMatter": Object {
"title": "Frontmatter title",
},
}
`;

@@ -22,7 +22,7 @@ describe('getDataFilePath', () => {
const contentPathEmpty = path.join(fixturesDir, 'contentPathEmpty');
const contentPathNestedYml = path.join(fixturesDir, 'contentPathNestedYml');

test('getDataFilePath returns localized Yml path in priority', async () => {
it('getDataFilePath returns localized Yml path in priority', async () => {
await expect(
getDataFilePath({
filePath: 'authors.yml',

@@ -43,7 +43,7 @@ describe('getDataFilePath', () => {
).resolves.toEqual(path.join(contentPathYml2, 'authors.yml'));
});

test('getDataFilePath returns localized Json path in priority', async () => {
it('getDataFilePath returns localized Json path in priority', async () => {
await expect(
getDataFilePath({
filePath: 'authors.json',

@@ -64,7 +64,7 @@ describe('getDataFilePath', () => {
).resolves.toEqual(path.join(contentPathJson2, 'authors.json'));
});

test('getDataFilePath returns unlocalized Yml path as fallback', async () => {
it('getDataFilePath returns unlocalized Yml path as fallback', async () => {
await expect(
getDataFilePath({
filePath: 'authors.yml',

@@ -76,7 +76,7 @@ describe('getDataFilePath', () => {
).resolves.toEqual(path.join(contentPathYml2, 'authors.yml'));
});

test('getDataFilePath returns unlocalized Json path as fallback', async () => {
it('getDataFilePath returns unlocalized Json path as fallback', async () => {
await expect(
getDataFilePath({
filePath: 'authors.json',

@@ -88,7 +88,7 @@ describe('getDataFilePath', () => {
).resolves.toEqual(path.join(contentPathJson1, 'authors.json'));
});

test('getDataFilePath can return undefined (file not found)', async () => {
it('getDataFilePath can return undefined (file not found)', async () => {
await expect(
getDataFilePath({
filePath: 'authors.json',

@@ -109,7 +109,7 @@ describe('getDataFilePath', () => {
).resolves.toBeUndefined();
});

test('getDataFilePath can return nested path', async () => {
it('getDataFilePath can return nested path', async () => {
await expect(
getDataFilePath({
filePath: 'sub/folder/authors.yml',

@@ -143,25 +143,25 @@ describe('getDataFileData', () => {
);
}

test('returns undefined for nonexistent file', async () => {
it('returns undefined for nonexistent file', async () => {
await expect(readDataFile('nonexistent.yml')).resolves.toBeUndefined();
});

test('read valid yml author file', async () => {
it('read valid yml author file', async () => {
await expect(readDataFile('valid.yml')).resolves.toEqual({a: 1});
});

test('read valid json author file', async () => {
it('read valid json author file', async () => {
await expect(readDataFile('valid.json')).resolves.toEqual({a: 1});
});

test('fail to read invalid yml', async () => {
it('fail to read invalid yml', async () => {
await expect(
readDataFile('bad.yml'),
).rejects.toThrowErrorMatchingInlineSnapshot(`"Nope"`);
});

test('fail to read invalid json', async () => {
it('fail to read invalid json', async () => {
await expect(
readDataFile('bad.json'),
).rejects.toThrowErrorMatchingInlineSnapshot(`"Nope"`);

@@ -169,7 +169,7 @@ describe('getDataFileData', () => {
});

describe('findFolderContainingFile', () => {
test('find appropriate folder', async () => {
it('find appropriate folder', async () => {
await expect(
findFolderContainingFile(
['/abcdef', '/gehij', __dirname, '/klmn'],

@@ -178,7 +178,7 @@ describe('findFolderContainingFile', () => {
).resolves.toEqual(__dirname);
});

test('return undefined if no folder contain such file', async () => {
it('return undefined if no folder contain such file', async () => {
await expect(
findFolderContainingFile(['/abcdef', '/gehij', '/klmn'], 'index.test.ts'),
).resolves.toBeUndefined();

@@ -186,7 +186,7 @@ describe('findFolderContainingFile', () => {
});

describe('getFolderContainingFile', () => {
test('get appropriate folder', async () => {
it('get appropriate folder', async () => {
await expect(
getFolderContainingFile(
['/abcdef', '/gehij', __dirname, '/klmn'],

@@ -195,7 +195,7 @@ describe('getFolderContainingFile', () => {
).resolves.toEqual(__dirname);
});

test('throw if no folder contain such file', async () => {
it('throw if no folder contain such file', async () => {
await expect(
getFolderContainingFile(
['/abcdef', '/gehij', '/klmn'],

@@ -10,53 +10,57 @@ import {genChunkName, readOutputHTMLFile, generate} from '../emitUtils';
import path from 'path';
import fs from 'fs-extra';

test('genChunkName', () => {
const firstAssert: Record<string, string> = {
'/docs/adding-blog': 'docs-adding-blog-062',
'/docs/versioning': 'docs-versioning-8a8',
'/': 'index',
'/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus':
'blog-2018-04-30-how-i-converted-profilo-to-docusaurus-4f2',
'/youtube': 'youtube-429',
'/users/en/': 'users-en-f7a',
'/blog': 'blog-c06',
};
Object.keys(firstAssert).forEach((str) => {
expect(genChunkName(str)).toBe(firstAssert[str]);
describe('genChunkName', () => {
it('works', () => {
const firstAssert: Record<string, string> = {
'/docs/adding-blog': 'docs-adding-blog-062',
'/docs/versioning': 'docs-versioning-8a8',
'/': 'index',
'/blog/2018/04/30/How-I-Converted-Profilo-To-Docusaurus':
'blog-2018-04-30-how-i-converted-profilo-to-docusaurus-4f2',
'/youtube': 'youtube-429',
'/users/en/': 'users-en-f7a',
'/blog': 'blog-c06',
};
Object.keys(firstAssert).forEach((str) => {
expect(genChunkName(str)).toBe(firstAssert[str]);
});
});

// Don't allow different chunk name for same path.
expect(genChunkName('path/is/similar', 'oldPrefix')).toEqual(
genChunkName('path/is/similar', 'newPrefix'),
);

// Even with same preferred name, still different chunk name for
// different path
const secondAssert: Record<string, string> = {
'/blog/1': 'blog-85-f-089',
'/blog/2': 'blog-353-489',
};
Object.keys(secondAssert).forEach((str) => {
expect(genChunkName(str, undefined, 'blog')).toBe(secondAssert[str]);
});

// Only generate short unique id
const thirdAssert: Record<string, string> = {
a: '0cc175b9',
b: '92eb5ffe',
c: '4a8a08f0',
d: '8277e091',
};
Object.keys(thirdAssert).forEach((str) => {
expect(genChunkName(str, undefined, undefined, true)).toBe(
thirdAssert[str],
it("doesn't allow different chunk name for same path", () => {
expect(genChunkName('path/is/similar', 'oldPrefix')).toEqual(
genChunkName('path/is/similar', 'newPrefix'),
);
});
expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091');

it('emits different chunk names for different paths even with same preferred name', () => {
const secondAssert: Record<string, string> = {
'/blog/1': 'blog-85-f-089',
'/blog/2': 'blog-353-489',
};
Object.keys(secondAssert).forEach((str) => {
expect(genChunkName(str, undefined, 'blog')).toBe(secondAssert[str]);
});
});

it('only generates short unique IDs', () => {
const thirdAssert: Record<string, string> = {
a: '0cc175b9',
b: '92eb5ffe',
c: '4a8a08f0',
d: '8277e091',
};
Object.keys(thirdAssert).forEach((str) => {
expect(genChunkName(str, undefined, undefined, true)).toBe(
thirdAssert[str],
);
});
expect(genChunkName('d', undefined, undefined, true)).toBe('8277e091');
});
});

describe('readOutputHTMLFile', () => {
test('trailing slash undefined', async () => {
it('trailing slash undefined', async () => {
await expect(
readOutputHTMLFile(
'/file',

@@ -86,7 +90,7 @@ describe('readOutputHTMLFile', () => {
).then(String),
).resolves.toEqual('folder\n');
});
test('trailing slash true', async () => {
it('trailing slash true', async () => {
await expect(
readOutputHTMLFile(
'/folder',

@@ -102,7 +106,7 @@ describe('readOutputHTMLFile', () => {
).then(String),
).resolves.toEqual('folder\n');
});
test('trailing slash false', async () => {
it('trailing slash false', async () => {
await expect(
readOutputHTMLFile(
'/file',

@@ -120,36 +124,40 @@ describe('readOutputHTMLFile', () => {
});
});

test('generate', async () => {
describe('generate', () => {
const writeMock = jest.spyOn(fs, 'outputFile').mockImplementation(() => {});
const existsMock = jest.spyOn(fs, 'pathExists');
const readMock = jest.spyOn(fs, 'readFile');

// First call: no file, no cache
existsMock.mockImplementationOnce(() => false);
await generate(__dirname, 'foo', 'bar');
expect(writeMock).toHaveBeenNthCalledWith(
1,
path.join(__dirname, 'foo'),
'bar',
);
it('works with no file and no cache', async () => {
existsMock.mockImplementationOnce(() => false);
await generate(__dirname, 'foo', 'bar');
expect(writeMock).toHaveBeenNthCalledWith(
1,
path.join(__dirname, 'foo'),
'bar',
);
});

// Second call: cache exists
await generate(__dirname, 'foo', 'bar');
expect(writeMock).toBeCalledTimes(1);
it('works with existing cache', async () => {
await generate(__dirname, 'foo', 'bar');
expect(writeMock).toBeCalledTimes(1);
});

// Generate another: file exists, cache doesn't
existsMock.mockImplementationOnce(() => true);
// @ts-expect-error: seems the typedef doesn't understand overload
readMock.mockImplementationOnce(() => Promise.resolve('bar'));
await generate(__dirname, 'baz', 'bar');
expect(writeMock).toBeCalledTimes(1);
it('works with existing file but no cache', async () => {
existsMock.mockImplementationOnce(() => true);
// @ts-expect-error: seems the typedef doesn't understand overload
readMock.mockImplementationOnce(() => Promise.resolve('bar'));
await generate(__dirname, 'baz', 'bar');
expect(writeMock).toBeCalledTimes(1);
});

// Generate again: force skip cache
await generate(__dirname, 'foo', 'bar', true);
expect(writeMock).toHaveBeenNthCalledWith(
2,
path.join(__dirname, 'foo'),
'bar',
);
it('works when force skipping cache', async () => {
await generate(__dirname, 'foo', 'bar', true);
expect(writeMock).toHaveBeenNthCalledWith(
2,
path.join(__dirname, 'foo'),
'bar',
);
});
});

@@ -14,7 +14,7 @@ import {
describe('createMatcher', () => {
const matcher = createMatcher(GlobExcludeDefault);

test('match default exclude MD/MDX partials correctly', () => {
it('match default exclude MD/MDX partials correctly', () => {
expect(matcher('doc.md')).toEqual(false);
expect(matcher('category/doc.md')).toEqual(false);
expect(matcher('category/subcategory/doc.md')).toEqual(false);

@@ -31,7 +31,7 @@ describe('createMatcher', () => {
expect(matcher('category/_subcategory/doc.md')).toEqual(true);
});

test('match default exclude tests correctly', () => {
it('match default exclude tests correctly', () => {
expect(matcher('xyz.js')).toEqual(false);
expect(matcher('xyz.ts')).toEqual(false);
expect(matcher('xyz.jsx')).toEqual(false);

@@ -73,7 +73,7 @@ describe('createAbsoluteFilePathMatcher', () => {
rootFolders,
);

test('match default exclude MD/MDX partials correctly', () => {
it('match default exclude MD/MDX partials correctly', () => {
expect(matcher('/_root/docs/myDoc.md')).toEqual(false);
expect(matcher('/_root/docs/myDoc.mdx')).toEqual(false);
expect(matcher('/root/_docs/myDoc.md')).toEqual(false);

@@ -93,13 +93,13 @@ describe('createAbsoluteFilePathMatcher', () => {
expect(matcher('/root/_docs/_category/myDoc.mdx')).toEqual(true);
});

test('match default exclude tests correctly', () => {
it('match default exclude tests correctly', () => {
expect(matcher('/__test__/website/src/xyz.js')).toEqual(false);
expect(matcher('/__test__/website/src/__test__/xyz.js')).toEqual(true);
expect(matcher('/__test__/website/src/xyz.test.js')).toEqual(true);
});

test('throw if file is not contained in any root doc', () => {
it('throw if file is not contained in any root doc', () => {
expect(() =>
matcher('/bad/path/myDoc.md'),
).toThrowErrorMatchingInlineSnapshot(

@@ -8,7 +8,7 @@
import {simpleHash, docuHash} from '../hashUtils';

describe('hashUtils', () => {
test('simpleHash', () => {
it('simpleHash', () => {
const asserts: Record<string, string> = {
'': 'd41',
'/foo-bar': '096',

@@ -29,7 +29,7 @@ describe('hashUtils', () => {
});

describe('docuHash', () => {
test('docuHash works', () => {
it('docuHash works', () => {
const asserts: Record<string, string> = {
'': '-d41',
'/': 'index',

@@ -11,52 +11,56 @@ import {
getPluginI18nPath,
} from '../i18nUtils';

test('mergeTranslations', () => {
expect(
mergeTranslations([
{
T1: {message: 'T1 message', description: 'T1 desc'},
T2: {message: 'T2 message', description: 'T2 desc'},
T3: {message: 'T3 message', description: 'T3 desc'},
},
{
T4: {message: 'T4 message', description: 'T4 desc'},
},
{T2: {message: 'T2 message 2', description: 'T2 desc 2'}},
]),
).toEqual({
T1: {message: 'T1 message', description: 'T1 desc'},
T2: {message: 'T2 message 2', description: 'T2 desc 2'},
T3: {message: 'T3 message', description: 'T3 desc'},
T4: {message: 'T4 message', description: 'T4 desc'},
describe('mergeTranslations', () => {
it('works', () => {
expect(
mergeTranslations([
{
T1: {message: 'T1 message', description: 'T1 desc'},
T2: {message: 'T2 message', description: 'T2 desc'},
T3: {message: 'T3 message', description: 'T3 desc'},
},
{
T4: {message: 'T4 message', description: 'T4 desc'},
},
{T2: {message: 'T2 message 2', description: 'T2 desc 2'}},
]),
).toEqual({
T1: {message: 'T1 message', description: 'T1 desc'},
T2: {message: 'T2 message 2', description: 'T2 desc 2'},
T3: {message: 'T3 message', description: 'T3 desc'},
T4: {message: 'T4 message', description: 'T4 desc'},
});
});
});

test('updateTranslationFileMessages', () => {
expect(
updateTranslationFileMessages(
{
path: 'abc',
content: {
t1: {message: 't1 message', description: 't1 desc'},
t2: {message: 't2 message', description: 't2 desc'},
t3: {message: 't3 message', description: 't3 desc'},
describe('updateTranslationFileMessages', () => {
it('works', () => {
expect(
updateTranslationFileMessages(
{
path: 'abc',
content: {
t1: {message: 't1 message', description: 't1 desc'},
t2: {message: 't2 message', description: 't2 desc'},
t3: {message: 't3 message', description: 't3 desc'},
},
},
(message) => `prefix ${message} suffix`,
),
).toEqual({
path: 'abc',
content: {
t1: {message: 'prefix t1 message suffix', description: 't1 desc'},
t2: {message: 'prefix t2 message suffix', description: 't2 desc'},
t3: {message: 'prefix t3 message suffix', description: 't3 desc'},
},
(message) => `prefix ${message} suffix`,
),
).toEqual({
path: 'abc',
content: {
t1: {message: 'prefix t1 message suffix', description: 't1 desc'},
t2: {message: 'prefix t2 message suffix', description: 't2 desc'},
t3: {message: 'prefix t3 message suffix', description: 't3 desc'},
},
});
});
});

describe('getPluginI18nPath', () => {
test('gets correct path', () => {
it('gets correct path', () => {
expect(
getPluginI18nPath({
siteDir: __dirname,

@@ -69,7 +73,7 @@ describe('getPluginI18nPath', () => {
`"<PROJECT_ROOT>/packages/docusaurus-utils/src/__tests__/i18n/zh-Hans/plugin-content-docs-community/foo"`,
);
});
test('gets correct path for default plugin', () => {
it('gets correct path for default plugin', () => {
expect(
getPluginI18nPath({
siteDir: __dirname,

@@ -79,7 +83,7 @@ describe('getPluginI18nPath', () => {
}).replace(__dirname, ''),
).toMatchInlineSnapshot(`"/i18n/zh-Hans/plugin-content-docs/foo"`);
});
test('gets correct path when no subpaths', () => {
it('gets correct path when no subpaths', () => {
expect(
getPluginI18nPath({
siteDir: __dirname,

@@ -17,37 +17,29 @@ import {
import _ from 'lodash';

describe('removeSuffix', () => {
test('should no-op 1', () => {
it("is no-op when suffix doesn't exist", () => {
expect(removeSuffix('abcdef', 'ijk')).toEqual('abcdef');
});
test('should no-op 2', () => {
expect(removeSuffix('abcdef', 'abc')).toEqual('abcdef');
});
test('should no-op 3', () => {
expect(removeSuffix('abcdef', '')).toEqual('abcdef');
});
test('should remove suffix', () => {
it('removes suffix', () => {
expect(removeSuffix('abcdef', 'ef')).toEqual('abcd');
});
});

describe('removePrefix', () => {
test('should no-op 1', () => {
it("is no-op when prefix doesn't exist", () => {
expect(removePrefix('abcdef', 'ijk')).toEqual('abcdef');
});
test('should no-op 2', () => {
expect(removePrefix('abcdef', 'def')).toEqual('abcdef');
});
test('should no-op 3', () => {
expect(removePrefix('abcdef', '')).toEqual('abcdef');
});
test('should remove prefix', () => {
it('removes prefix', () => {
expect(removePrefix('abcdef', 'ab')).toEqual('cdef');
});
});

describe('getElementsAround', () => {
test('can return elements around', () => {
it('returns elements around', () => {
expect(getElementsAround(['a', 'b', 'c', 'd'], 0)).toEqual({
previous: undefined,
next: 'b',

@@ -66,7 +58,7 @@ describe('getElementsAround', () => {
});
});

test('throws if bad index is provided', () => {
it('throws if bad index is provided', () => {
expect(() =>
getElementsAround(['a', 'b', 'c', 'd'], -1),
).toThrowErrorMatchingInlineSnapshot(

@@ -87,7 +79,7 @@ describe('mapAsyncSequential', () => {
});
}

test('map sequentially', async () => {
it('maps sequentially', async () => {
const itemToTimeout: Record<string, number> = {
'1': 200,
'2': 600,

@@ -132,7 +124,7 @@ describe('findAsyncSequential', () => {
});
}

test('find sequentially', async () => {
it('finds sequentially', async () => {
const items = ['1', '2', '3'];

const findFn = jest.fn(async (item: string) => {

@@ -155,7 +147,7 @@ describe('findAsyncSequential', () => {
});

describe('reportMessage', () => {
test('all severities', () => {
it('works with all severities', () => {
const consoleLog = jest.spyOn(console, 'info').mockImplementation(() => {});
const consoleWarn = jest
.spyOn(console, 'warn')

@@ -8,7 +8,7 @@
import {replaceMarkdownLinks} from '../markdownLinks';

describe('replaceMarkdownLinks', () => {
test('basic replace', () => {
it('does basic replace', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',

@@ -35,34 +35,10 @@ describe('replaceMarkdownLinks', () => {
[nonexistent](hmmm.md)
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [
Object {
"contentPaths": Object {
"contentPath": "docs",
"contentPathLocalized": "i18n/docs-localized",
},
"filePath": "docs/intro.md",
"link": "hmmm.md",
},
],
"newContent": "
[foo](/doc/foo)
[baz](/doc/baz)
[foo](/doc/foo)
[http](http://github.com/facebook/docusaurus/README.md)
[https](https://github.com/facebook/docusaurus/README.md)
[asset](./foo.js)
[asset as well](@site/docs/_partial.md)
[looks like http...](/doc/http)
[nonexistent](hmmm.md)
",
}
`);
).toMatchSnapshot();
});

test('reference style Markdown links', () => {
it('replaces reference style Markdown links', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',

@@ -88,25 +64,11 @@ The following operations are defined for [URI]s:
[list]: ../api/classes/divine_uri.URI.md#list
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [],
"newContent": "
The following operations are defined for [URI]s:

* [info]: Returns metadata about the resource,
* [list]: Returns metadata about the resource's children (like getting the content of a local directory).

[URI]: /docs/api/classes/uri
[info]: /docs/api/classes/uri#info
[list]: /docs/api/classes/uri#list
",
}
`);
).toMatchSnapshot();
});

// TODO bad
test('links in HTML comments', () => {
it('ignores links in HTML comments', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',

@@ -125,37 +87,10 @@ The following operations are defined for [URI]s:
-->
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [
Object {
"contentPaths": Object {
"contentPath": "docs",
"contentPathLocalized": "i18n/docs-localized",
},
"filePath": "docs/intro.md",
"link": "./foo.md",
},
Object {
"contentPaths": Object {
"contentPath": "docs",
"contentPathLocalized": "i18n/docs-localized",
},
"filePath": "docs/intro.md",
"link": "./foo.md",
},
],
"newContent": "
<!-- [foo](./foo.md) -->
<!--
[foo](./foo.md)
-->
",
}
`);
).toMatchSnapshot();
});

test('links in fenced blocks', () => {
it('ignores links in fenced blocks', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',

@@ -187,34 +122,11 @@ The following operations are defined for [URI]s:
\`\`\`\`
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [],
"newContent": "
\`\`\`
[foo](foo.md)
\`\`\`

\`\`\`\`js
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`\`

\`\`\`\`js
[foo](foo.md)
\`\`\`
[foo](foo.md)
\`\`\`\`
",
}
`);
).toMatchSnapshot();
});

// TODO bad
test('links in inline code', () => {
it('ignores links in inline code', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',

@@ -230,27 +142,11 @@ The following operations are defined for [URI]s:
\`[foo](foo.md)\`
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [
Object {
"contentPaths": Object {
"contentPath": "docs",
"contentPathLocalized": "i18n/docs-localized",
},
"filePath": "docs/intro.md",
"link": "foo.md",
},
],
"newContent": "
\`[foo](foo.md)\`
",
}
`);
).toMatchSnapshot();
});

// TODO bad
test('links with same title as URL', () => {
it('replaces links with same title as URL', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',

@@ -270,20 +166,10 @@ The following operations are defined for [URI]s:
[./foo.md](foo.md)
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [],
"newContent": "
[/docs/foo](foo.md)
[/docs/foo](./foo.md)
[foo.md](/docs/foo)
[.//docs/foo](foo.md)
",
}
`);
).toMatchSnapshot();
});

test('multiple links on same line', () => {
it('replaces multiple links on same line', () => {
expect(
replaceMarkdownLinks({
siteDir: '.',

@@ -302,13 +188,6 @@ The following operations are defined for [URI]s:
[a](a.md), [a](a.md), [b](b.md), [c](c.md)
`,
}),
).toMatchInlineSnapshot(`
Object {
"brokenMarkdownLinks": Array [],
"newContent": "
[a](/docs/a), [a](/docs/a), [b](/docs/b), [c](/docs/c)
",
}
`);
).toMatchSnapshot();
});
});

Some files were not shown because too many files have changed in this diff