2024-03-22 03:47:51 +05:30
parent 8bcf3d211e
commit 89819f6fe2
28440 changed files with 3211033 additions and 2 deletions

21
node_modules/@docusaurus/utils/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
MIT License
Copyright (c) Facebook, Inc. and its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

3
node_modules/@docusaurus/utils/README.md generated vendored Normal file

@@ -0,0 +1,3 @@
# `@docusaurus/utils`
Node utility functions for Docusaurus packages.

73
node_modules/@docusaurus/utils/lib/constants.d.ts generated vendored Normal file

@@ -0,0 +1,73 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/** Node major version, directly read from env. */
export declare const NODE_MAJOR_VERSION: number;
/** Node minor version, directly read from env. */
export declare const NODE_MINOR_VERSION: number;
/** Docusaurus core version. */
export declare const DOCUSAURUS_VERSION: string;
/**
* Can be overridden with cli option `--out-dir`. Code should generally use
* `context.outDir` instead (which is always absolute and localized).
*/
export declare const DEFAULT_BUILD_DIR_NAME = "build";
/**
* Can be overridden with cli option `--config`. Code should generally use
* `context.siteConfigPath` instead (which is always absolute).
*
* This does not have extensions, so that we can substitute different ones
* when resolving the path.
*/
export declare const DEFAULT_CONFIG_FILE_NAME = "docusaurus.config";
/** Can be absolute or relative to site directory. */
export declare const BABEL_CONFIG_FILE_NAME: string;
/**
* Can be absolute or relative to site directory. Code should generally use
* `context.generatedFilesDir` instead (which is always absolute).
*/
export declare const GENERATED_FILES_DIR_NAME: string;
/**
* We assume all of the site's JS code lives here and not outside.
* Relative to the site directory.
*/
export declare const SRC_DIR_NAME = "src";
/**
* Can be overridden with `config.staticDirectories`. Code should use
* `context.siteConfig.staticDirectories` instead (which is always absolute).
*/
export declare const DEFAULT_STATIC_DIR_NAME = "static";
/**
* Files here are handled by webpack, hashed (can be cached aggressively).
* Relative to the build output folder.
*/
export declare const OUTPUT_STATIC_ASSETS_DIR_NAME = "assets";
/**
* Components in this directory will receive the `@theme` alias and be able to
* shadow default theme components.
*/
export declare const THEME_PATH = "src/theme";
/**
* All translation-related data live here, relative to site directory. Content
* will be namespaced by locale.
*/
export declare const DEFAULT_I18N_DIR_NAME = "i18n";
/**
* Translations for React code.
*/
export declare const CODE_TRANSLATIONS_FILE_NAME = "code.json";
/** Dev server opens on this port by default. */
export declare const DEFAULT_PORT: number;
/** Default plugin ID. */
export declare const DEFAULT_PLUGIN_ID = "default";
/**
* Allow overriding the limit after which the url loader will no longer inline
* assets.
*
* @see https://github.com/facebook/docusaurus/issues/5493
*/
export declare const WEBPACK_URL_LOADER_LIMIT: string | number;
//# sourceMappingURL=constants.d.ts.map

1
node_modules/@docusaurus/utils/lib/constants.d.ts.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,kDAAkD;AAClD,eAAO,MAAM,kBAAkB,QAG9B,CAAC;AACF,kDAAkD;AAClD,eAAO,MAAM,kBAAkB,QAG9B,CAAC;AAEF,+BAA+B;AAC/B,eAAO,MAAM,kBAAkB,QAE4B,CAAC;AAE5D;;;GAGG;AACH,eAAO,MAAM,sBAAsB,UAAU,CAAC;AAE9C;;;;;;GAMG;AACH,eAAO,MAAM,wBAAwB,sBAAsB,CAAC;AAE5D,qDAAqD;AACrD,eAAO,MAAM,sBAAsB,QACiC,CAAC;AAErE;;;GAGG;AACH,eAAO,MAAM,wBAAwB,QAC6B,CAAC;AAEnE;;;GAGG;AACH,eAAO,MAAM,YAAY,QAAQ,CAAC;AAElC;;;GAGG;AACH,eAAO,MAAM,uBAAuB,WAAW,CAAC;AAEhD;;;GAGG;AACH,eAAO,MAAM,6BAA6B,WAAW,CAAC;AAEtD;;;GAGG;AACH,eAAO,MAAM,UAAU,cAA0B,CAAC;AAElD;;;GAGG;AACH,eAAO,MAAM,qBAAqB,SAAS,CAAC;AAE5C;;GAEG;AACH,eAAO,MAAM,2BAA2B,cAAc,CAAC;AAEvD,gDAAgD;AAChD,eAAO,MAAM,YAAY,QAEjB,CAAC;AAET,yBAAyB;AACzB,eAAO,MAAM,iBAAiB,YAAY,CAAC;AAE3C;;;;;GAKG;AACH,eAAO,MAAM,wBAAwB,iBACU,CAAC"}

80
node_modules/@docusaurus/utils/lib/constants.js generated vendored Normal file

@@ -0,0 +1,80 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.WEBPACK_URL_LOADER_LIMIT = exports.DEFAULT_PLUGIN_ID = exports.DEFAULT_PORT = exports.CODE_TRANSLATIONS_FILE_NAME = exports.DEFAULT_I18N_DIR_NAME = exports.THEME_PATH = exports.OUTPUT_STATIC_ASSETS_DIR_NAME = exports.DEFAULT_STATIC_DIR_NAME = exports.SRC_DIR_NAME = exports.GENERATED_FILES_DIR_NAME = exports.BABEL_CONFIG_FILE_NAME = exports.DEFAULT_CONFIG_FILE_NAME = exports.DEFAULT_BUILD_DIR_NAME = exports.DOCUSAURUS_VERSION = exports.NODE_MINOR_VERSION = exports.NODE_MAJOR_VERSION = void 0;
/** Node major version, directly read from env. */
exports.NODE_MAJOR_VERSION = parseInt(process.versions.node.split('.')[0], 10);
/** Node minor version, directly read from env. */
exports.NODE_MINOR_VERSION = parseInt(process.versions.node.split('.')[1], 10);
/** Docusaurus core version. */
exports.DOCUSAURUS_VERSION =
// eslint-disable-next-line global-require, @typescript-eslint/no-var-requires
require('../package.json').version;
/**
* Can be overridden with cli option `--out-dir`. Code should generally use
* `context.outDir` instead (which is always absolute and localized).
*/
exports.DEFAULT_BUILD_DIR_NAME = 'build';
/**
* Can be overridden with cli option `--config`. Code should generally use
* `context.siteConfigPath` instead (which is always absolute).
*
* This does not have extensions, so that we can substitute different ones
* when resolving the path.
*/
exports.DEFAULT_CONFIG_FILE_NAME = 'docusaurus.config';
/** Can be absolute or relative to site directory. */
exports.BABEL_CONFIG_FILE_NAME = process.env.DOCUSAURUS_BABEL_CONFIG_FILE_NAME ?? 'babel.config.js';
/**
* Can be absolute or relative to site directory. Code should generally use
* `context.generatedFilesDir` instead (which is always absolute).
*/
exports.GENERATED_FILES_DIR_NAME = process.env.DOCUSAURUS_GENERATED_FILES_DIR_NAME ?? '.docusaurus';
/**
* We assume all of the site's JS code lives here and not outside.
* Relative to the site directory.
*/
exports.SRC_DIR_NAME = 'src';
/**
* Can be overridden with `config.staticDirectories`. Code should use
* `context.siteConfig.staticDirectories` instead (which is always absolute).
*/
exports.DEFAULT_STATIC_DIR_NAME = 'static';
/**
* Files here are handled by webpack, hashed (can be cached aggressively).
* Relative to the build output folder.
*/
exports.OUTPUT_STATIC_ASSETS_DIR_NAME = 'assets';
/**
* Components in this directory will receive the `@theme` alias and be able to
* shadow default theme components.
*/
exports.THEME_PATH = `${exports.SRC_DIR_NAME}/theme`;
/**
* All translation-related data live here, relative to site directory. Content
* will be namespaced by locale.
*/
exports.DEFAULT_I18N_DIR_NAME = 'i18n';
/**
* Translations for React code.
*/
exports.CODE_TRANSLATIONS_FILE_NAME = 'code.json';
/** Dev server opens on this port by default. */
exports.DEFAULT_PORT = process.env.PORT
? parseInt(process.env.PORT, 10)
: 3000;
/** Default plugin ID. */
exports.DEFAULT_PLUGIN_ID = 'default';
/**
* Allow overriding the limit after which the url loader will no longer inline
* assets.
*
* @see https://github.com/facebook/docusaurus/issues/5493
*/
exports.WEBPACK_URL_LOADER_LIMIT = process.env.WEBPACK_URL_LOADER_LIMIT ?? 10000;
//# sourceMappingURL=constants.js.map
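A small usage sketch of the environment-driven constants above (the values printed depend on PORT and DOCUSAURUS_GENERATED_FILES_DIR_NAME at require time; the output shown is illustrative):

import {
  DEFAULT_PLUGIN_ID,
  DEFAULT_PORT,
  DOCUSAURUS_VERSION,
  GENERATED_FILES_DIR_NAME,
} from '@docusaurus/utils';

// DEFAULT_PORT is parseInt(process.env.PORT, 10) when PORT is set, else 3000;
// GENERATED_FILES_DIR_NAME honors DOCUSAURUS_GENERATED_FILES_DIR_NAME and
// falls back to '.docusaurus'.
console.log(`Docusaurus ${DOCUSAURUS_VERSION}, default port ${DEFAULT_PORT}`);
console.log(`Plugin id "${DEFAULT_PLUGIN_ID}", generated files in ${GENERATED_FILES_DIR_NAME}/`);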

1
node_modules/@docusaurus/utils/lib/constants.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;AAEH,kDAAkD;AACrC,QAAA,kBAAkB,GAAG,QAAQ,CACxC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAE,EACpC,EAAE,CACH,CAAC;AACF,kDAAkD;AACrC,QAAA,kBAAkB,GAAG,QAAQ,CACxC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAE,EACpC,EAAE,CACH,CAAC;AAEF,+BAA+B;AAClB,QAAA,kBAAkB;AAC7B,8EAA8E;AAC7E,OAAO,CAAC,iBAAiB,CAAuB,CAAC,OAAO,CAAC;AAE5D;;;GAGG;AACU,QAAA,sBAAsB,GAAG,OAAO,CAAC;AAE9C;;;;;;GAMG;AACU,QAAA,wBAAwB,GAAG,mBAAmB,CAAC;AAE5D,qDAAqD;AACxC,QAAA,sBAAsB,GACjC,OAAO,CAAC,GAAG,CAAC,iCAAiC,IAAI,iBAAiB,CAAC;AAErE;;;GAGG;AACU,QAAA,wBAAwB,GACnC,OAAO,CAAC,GAAG,CAAC,mCAAmC,IAAI,aAAa,CAAC;AAEnE;;;GAGG;AACU,QAAA,YAAY,GAAG,KAAK,CAAC;AAElC;;;GAGG;AACU,QAAA,uBAAuB,GAAG,QAAQ,CAAC;AAEhD;;;GAGG;AACU,QAAA,6BAA6B,GAAG,QAAQ,CAAC;AAEtD;;;GAGG;AACU,QAAA,UAAU,GAAG,GAAG,oBAAY,QAAQ,CAAC;AAElD;;;GAGG;AACU,QAAA,qBAAqB,GAAG,MAAM,CAAC;AAE5C;;GAEG;AACU,QAAA,2BAA2B,GAAG,WAAW,CAAC;AAEvD,gDAAgD;AACnC,QAAA,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,IAAI;IAC1C,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC;IAChC,CAAC,CAAC,IAAI,CAAC;AAET,yBAAyB;AACZ,QAAA,iBAAiB,GAAG,SAAS,CAAC;AAE3C;;;;;GAKG;AACU,QAAA,wBAAwB,GACnC,OAAO,CAAC,GAAG,CAAC,wBAAwB,IAAI,KAAK,CAAC"}

29
node_modules/@docusaurus/utils/lib/contentVisibilityUtils.d.ts generated vendored Normal file

@@ -0,0 +1,29 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
type Env = 'production' | 'development';
/**
* Draft content will not be included in the production build
*/
export declare function isDraft({ frontMatter, env, }: {
frontMatter: {
draft?: boolean;
};
env?: Env;
}): boolean;
/**
* Unlisted content will be included in the production build, but hidden.
* It is excluded from the sitemap, has noIndex, does not appear in lists, etc.
* Only users who have the link can find it.
*/
export declare function isUnlisted({ frontMatter, env, }: {
frontMatter: {
unlisted?: boolean;
};
env?: Env;
}): boolean;
export {};
//# sourceMappingURL=contentVisibilityUtils.d.ts.map

1
node_modules/@docusaurus/utils/lib/contentVisibilityUtils.d.ts.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"contentVisibilityUtils.d.ts","sourceRoot":"","sources":["../src/contentVisibilityUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,KAAK,GAAG,GAAG,YAAY,GAAG,aAAa,CAAC;AAoBxC;;GAEG;AACH,wBAAgB,OAAO,CAAC,EACtB,WAAW,EACX,GAAG,GACJ,EAAE;IACD,WAAW,EAAE;QAAC,KAAK,CAAC,EAAE,OAAO,CAAA;KAAC,CAAC;IAC/B,GAAG,CAAC,EAAE,GAAG,CAAC;CACX,GAAG,OAAO,CAEV;AAED;;;;GAIG;AACH,wBAAgB,UAAU,CAAC,EACzB,WAAW,EACX,GAAG,GACJ,EAAE;IACD,WAAW,EAAE;QAAC,QAAQ,CAAC,EAAE,OAAO,CAAA;KAAC,CAAC;IAClC,GAAG,CAAC,EAAE,GAAG,CAAC;CACX,GAAG,OAAO,CAEV"}

39
node_modules/@docusaurus/utils/lib/contentVisibilityUtils.js generated vendored Normal file

@@ -0,0 +1,39 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.isUnlisted = exports.isDraft = void 0;
/**
* To easily work on draft/unlisted in dev mode, use this env variable!
* SIMULATE_PRODUCTION_VISIBILITY=true yarn start:website
*/
const simulateProductionVisibility = process.env.SIMULATE_PRODUCTION_VISIBILITY === 'true';
/**
* draft/unlisted is a production-only concept
* In dev it is ignored and all content files are included
*/
function isProduction(env) {
return (simulateProductionVisibility ||
(env ?? process.env.NODE_ENV) === 'production');
}
/**
* Draft content will not be included in the production build
*/
function isDraft({ frontMatter, env, }) {
return (isProduction(env) && frontMatter.draft) ?? false;
}
exports.isDraft = isDraft;
/**
* Unlisted content will be included in the production build, but hidden.
* It is excluded from the sitemap, has noIndex, does not appear in lists, etc.
* Only users who have the link can find it.
*/
function isUnlisted({ frontMatter, env, }) {
return (isProduction(env) && frontMatter.unlisted) ?? false;
}
exports.isUnlisted = isUnlisted;
//# sourceMappingURL=contentVisibilityUtils.js.map
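A minimal usage sketch for the two helpers above (the front matter object is illustrative):

import {isDraft, isUnlisted} from '@docusaurus/utils';

const frontMatter = {draft: true, unlisted: false};

// In development both helpers return false (unless
// SIMULATE_PRODUCTION_VISIBILITY=true); in production, drafts are excluded
// from the build and unlisted pages are built but hidden from listings.
console.log(isDraft({frontMatter, env: 'production'})); // true
console.log(isUnlisted({frontMatter, env: 'production'})); // false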

1
node_modules/@docusaurus/utils/lib/contentVisibilityUtils.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"contentVisibilityUtils.js","sourceRoot":"","sources":["../src/contentVisibilityUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;AAIH;;;GAGG;AACH,MAAM,4BAA4B,GAChC,OAAO,CAAC,GAAG,CAAC,8BAA8B,KAAK,MAAM,CAAC;AAExD;;;GAGG;AACH,SAAS,YAAY,CAAC,GAAoB;IACxC,OAAO,CACL,4BAA4B;QAC5B,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,YAAY,CAC/C,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,SAAgB,OAAO,CAAC,EACtB,WAAW,EACX,GAAG,GAIJ;IACC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,WAAW,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC;AAC3D,CAAC;AARD,0BAQC;AAED;;;;GAIG;AACH,SAAgB,UAAU,CAAC,EACzB,WAAW,EACX,GAAG,GAIJ;IACC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,WAAW,CAAC,QAAQ,CAAC,IAAI,KAAK,CAAC;AAC9D,CAAC;AARD,gCAQC"}

60
node_modules/@docusaurus/utils/lib/dataFileUtils.d.ts generated vendored Normal file

@@ -0,0 +1,60 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import type { ContentPaths } from './markdownLinks';
type DataFileParams = {
/** Path to the potential data file, relative to `contentPaths` */
filePath: string;
/**
* Includes the base path and localized path, both of which are eligible for
* sourcing data files. Both paths should be absolute.
*/
contentPaths: ContentPaths;
};
/**
* Looks for a data file in the potential content paths; a localized data file
* takes precedence.
*
* @returns An absolute path to the data file, or `undefined` if there isn't one.
*/
export declare function getDataFilePath({ filePath, contentPaths, }: DataFileParams): Promise<string | undefined>;
/**
* Looks up a data file in the content paths and returns the object validated
* and normalized according to the `validate` callback.
*
* @returns `undefined` when file not found
* @throws Throws when validation fails, displaying a helpful context message.
*/
export declare function getDataFileData<T>(params: DataFileParams & {
/** Used for the "The X file looks invalid" message. */
fileType: string;
}, validate: (content: unknown) => T): Promise<T | undefined>;
/**
* Takes the `contentPaths` data structure and returns an ordered path list
* indicating their priorities. For all data, we look in the localized folder
* first.
*/
export declare function getContentPathList(contentPaths: ContentPaths): string[];
/**
* @param folderPaths a list of absolute paths.
* @param relativeFilePath file path relative to each `folderPaths`.
* @returns the first folder path in which the file exists, or `undefined` if
* none is found.
*/
export declare function findFolderContainingFile(folderPaths: string[], relativeFilePath: string): Promise<string | undefined>;
/**
* Fail-fast alternative to `findFolderContainingFile`.
*
* @param folderPaths a list of absolute paths.
* @param relativeFilePath file path relative to each `folderPaths`.
* @returns the first folder path in which the file exists.
* @throws Throws if no file can be found. You should use this method only when
* you actually know the file exists (e.g. when the `relativeFilePath` is read
* with a glob and you are just trying to localize it)
*/
export declare function getFolderContainingFile(folderPaths: string[], relativeFilePath: string): Promise<string>;
export {};
//# sourceMappingURL=dataFileUtils.d.ts.map

1
node_modules/@docusaurus/utils/lib/dataFileUtils.d.ts.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"dataFileUtils.d.ts","sourceRoot":"","sources":["../src/dataFileUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAOH,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,iBAAiB,CAAC;AAElD,KAAK,cAAc,GAAG;IACpB,kEAAkE;IAClE,QAAQ,EAAE,MAAM,CAAC;IACjB;;;OAGG;IACH,YAAY,EAAE,YAAY,CAAC;CAC5B,CAAC;AAEF;;;;;GAKG;AACH,wBAAsB,eAAe,CAAC,EACpC,QAAQ,EACR,YAAY,GACb,EAAE,cAAc,GAAG,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAS9C;AAED;;;;;;GAMG;AACH,wBAAsB,eAAe,CAAC,CAAC,EACrC,MAAM,EAAE,cAAc,GAAG;IACvB,uDAAuD;IACvD,QAAQ,EAAE,MAAM,CAAC;CAClB,EACD,QAAQ,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,CAAC,GAChC,OAAO,CAAC,CAAC,GAAG,SAAS,CAAC,CAaxB;AAED;;;;GAIG;AACH,wBAAgB,kBAAkB,CAAC,YAAY,EAAE,YAAY,GAAG,MAAM,EAAE,CAEvE;AAED;;;;;GAKG;AACH,wBAAsB,wBAAwB,CAC5C,WAAW,EAAE,MAAM,EAAE,EACrB,gBAAgB,EAAE,MAAM,GACvB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAI7B;AAED;;;;;;;;;GASG;AACH,wBAAsB,uBAAuB,CAC3C,WAAW,EAAE,MAAM,EAAE,EACrB,gBAAgB,EAAE,MAAM,GACvB,OAAO,CAAC,MAAM,CAAC,CAYjB"}

91
node_modules/@docusaurus/utils/lib/dataFileUtils.js generated vendored Normal file

@@ -0,0 +1,91 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.getFolderContainingFile = exports.findFolderContainingFile = exports.getContentPathList = exports.getDataFileData = exports.getDataFilePath = void 0;
const tslib_1 = require("tslib");
const fs_extra_1 = tslib_1.__importDefault(require("fs-extra"));
const path_1 = tslib_1.__importDefault(require("path"));
const logger_1 = tslib_1.__importDefault(require("@docusaurus/logger"));
const js_yaml_1 = tslib_1.__importDefault(require("js-yaml"));
const index_1 = require("./index");
/**
* Looks for a data file in the potential content paths; a localized data file
* takes precedence.
*
* @returns An absolute path to the data file, or `undefined` if there isn't one.
*/
async function getDataFilePath({ filePath, contentPaths, }) {
const contentPath = await findFolderContainingFile(getContentPathList(contentPaths), filePath);
if (contentPath) {
return path_1.default.resolve(contentPath, filePath);
}
return undefined;
}
exports.getDataFilePath = getDataFilePath;
/**
* Looks up a data file in the content paths and returns the object validated
* and normalized according to the `validate` callback.
*
* @returns `undefined` when file not found
* @throws Throws when validation fails, displaying a helpful context message.
*/
async function getDataFileData(params, validate) {
const filePath = await getDataFilePath(params);
if (!filePath) {
return undefined;
}
try {
const contentString = await fs_extra_1.default.readFile(filePath, { encoding: 'utf8' });
const unsafeContent = js_yaml_1.default.load(contentString);
return validate(unsafeContent);
}
catch (err) {
logger_1.default.error `The ${params.fileType} file at path=${filePath} looks invalid.`;
throw err;
}
}
exports.getDataFileData = getDataFileData;
/**
* Takes the `contentPaths` data structure and returns an ordered path list
* indicating their priorities. For all data, we look in the localized folder
* first.
*/
function getContentPathList(contentPaths) {
return [contentPaths.contentPathLocalized, contentPaths.contentPath];
}
exports.getContentPathList = getContentPathList;
/**
* @param folderPaths a list of absolute paths.
* @param relativeFilePath file path relative to each `folderPaths`.
* @returns the first folder path in which the file exists, or `undefined` if
* none is found.
*/
async function findFolderContainingFile(folderPaths, relativeFilePath) {
return (0, index_1.findAsyncSequential)(folderPaths, (folderPath) => fs_extra_1.default.pathExists(path_1.default.join(folderPath, relativeFilePath)));
}
exports.findFolderContainingFile = findFolderContainingFile;
/**
* Fail-fast alternative to `findFolderContainingFile`.
*
* @param folderPaths a list of absolute paths.
* @param relativeFilePath file path relative to each `folderPaths`.
* @returns the first folder path in which the file exists.
* @throws Throws if no file can be found. You should use this method only when
* you actually know the file exists (e.g. when the `relativeFilePath` is read
* with a glob and you are just trying to localize it)
*/
async function getFolderContainingFile(folderPaths, relativeFilePath) {
const maybeFolderPath = await findFolderContainingFile(folderPaths, relativeFilePath);
if (!maybeFolderPath) {
throw new Error(`File "${relativeFilePath}" does not exist in any of these folders:
- ${folderPaths.join('\n- ')}`);
}
return maybeFolderPath;
}
exports.getFolderContainingFile = getFolderContainingFile;
//# sourceMappingURL=dataFileUtils.js.map
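A sketch of loading an optional YAML data file with the helpers above (the paths, file name, and the deliberately loose validation callback are illustrative; `contentPaths` normally comes from the plugin's own content loading logic):

import {getDataFileData} from '@docusaurus/utils';

type AuthorsMap = Record<string, {name: string}>;

async function loadAuthors(): Promise<AuthorsMap | undefined> {
  const contentPaths = {
    contentPath: '/site/blog',
    contentPathLocalized: '/site/i18n/fr/docusaurus-plugin-content-blog',
  };
  // Returns undefined if authors.yml exists in neither folder (the localized
  // folder is checked first); throws, after logging a "looks invalid" message,
  // if the validate callback rejects the parsed YAML.
  return getDataFileData(
    {filePath: 'authors.yml', contentPaths, fileType: 'authors map'},
    (content) => content as AuthorsMap,
  );
}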

1
node_modules/@docusaurus/utils/lib/dataFileUtils.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"dataFileUtils.js","sourceRoot":"","sources":["../src/dataFileUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,gEAA0B;AAC1B,wDAAwB;AACxB,wEAAwC;AACxC,8DAA2B;AAC3B,mCAA4C;AAa5C;;;;;GAKG;AACI,KAAK,UAAU,eAAe,CAAC,EACpC,QAAQ,EACR,YAAY,GACG;IACf,MAAM,WAAW,GAAG,MAAM,wBAAwB,CAChD,kBAAkB,CAAC,YAAY,CAAC,EAChC,QAAQ,CACT,CAAC;IACF,IAAI,WAAW,EAAE;QACf,OAAO,cAAI,CAAC,OAAO,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;KAC5C;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAZD,0CAYC;AAED;;;;;;GAMG;AACI,KAAK,UAAU,eAAe,CACnC,MAGC,EACD,QAAiC;IAEjC,MAAM,QAAQ,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,CAAC;IAC/C,IAAI,CAAC,QAAQ,EAAE;QACb,OAAO,SAAS,CAAC;KAClB;IACD,IAAI;QACF,MAAM,aAAa,GAAG,MAAM,kBAAE,CAAC,QAAQ,CAAC,QAAQ,EAAE,EAAC,QAAQ,EAAE,MAAM,EAAC,CAAC,CAAC;QACtE,MAAM,aAAa,GAAG,iBAAI,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAC/C,OAAO,QAAQ,CAAC,aAAa,CAAC,CAAC;KAChC;IAAC,OAAO,GAAG,EAAE;QACZ,gBAAM,CAAC,KAAK,CAAA,OAAO,MAAM,CAAC,QAAQ,iBAAiB,QAAQ,iBAAiB,CAAC;QAC7E,MAAM,GAAG,CAAC;KACX;AACH,CAAC;AAnBD,0CAmBC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,YAA0B;IAC3D,OAAO,CAAC,YAAY,CAAC,oBAAoB,EAAE,YAAY,CAAC,WAAW,CAAC,CAAC;AACvE,CAAC;AAFD,gDAEC;AAED;;;;;GAKG;AACI,KAAK,UAAU,wBAAwB,CAC5C,WAAqB,EACrB,gBAAwB;IAExB,OAAO,IAAA,2BAAmB,EAAC,WAAW,EAAE,CAAC,UAAU,EAAE,EAAE,CACrD,kBAAE,CAAC,UAAU,CAAC,cAAI,CAAC,IAAI,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC,CACvD,CAAC;AACJ,CAAC;AAPD,4DAOC;AAED;;;;;;;;;GASG;AACI,KAAK,UAAU,uBAAuB,CAC3C,WAAqB,EACrB,gBAAwB;IAExB,MAAM,eAAe,GAAG,MAAM,wBAAwB,CACpD,WAAW,EACX,gBAAgB,CACjB,CAAC;IACF,IAAI,CAAC,eAAe,EAAE;QACpB,MAAM,IAAI,KAAK,CACb,SAAS,gBAAgB;IAC3B,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CACzB,CAAC;KACH;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAfD,0DAeC"}

32
node_modules/@docusaurus/utils/lib/emitUtils.d.ts generated vendored Normal file

@@ -0,0 +1,32 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/// <reference types="node" />
/**
* Outputs a file to the generated files directory. Only writes files if content
* differs from cache (for hot reload performance).
*
* @param generatedFilesDir Absolute path.
* @param file Path relative to `generatedFilesDir`. File will always be
* outputted; no need to ensure directory exists.
* @param content String content to write.
* @param skipCache If `true` (defaults to `true` in production), the file is
* force-rewritten, skipping the cache.
*/
export declare function generate(generatedFilesDir: string, file: string, content: string, skipCache?: boolean): Promise<void>;
/**
* @param permalink The URL that the HTML file corresponds to, without base URL
* @param outDir Full path to the output directory
* @param trailingSlash The site config option. If provided, only one path will
* be read.
* @returns A buffer, which you have to decode to a string yourself if needed.
* (Not always necessary, since the output isn't for human consumption anyway,
* and most HTML manipulation libs accept buffers.)
* @throws Throws when the HTML file is not found at any of the potential paths.
* This should never happen as it would lead to a 404.
*/
export declare function readOutputHTMLFile(permalink: string, outDir: string, trailingSlash: boolean | undefined): Promise<Buffer>;
//# sourceMappingURL=emitUtils.d.ts.map

1
node_modules/@docusaurus/utils/lib/emitUtils.d.ts.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"emitUtils.d.ts","sourceRoot":"","sources":["../src/emitUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;;AASH;;;;;;;;;;GAUG;AACH,wBAAsB,QAAQ,CAC5B,iBAAiB,EAAE,MAAM,EACzB,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,MAAM,EACf,SAAS,GAAE,OAA+C,GACzD,OAAO,CAAC,IAAI,CAAC,CAgCf;AAED;;;;;;;;;;GAUG;AACH,wBAAsB,kBAAkB,CACtC,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,MAAM,EACd,aAAa,EAAE,OAAO,GAAG,SAAS,GACjC,OAAO,CAAC,MAAM,CAAC,CAqBjB"}

85
node_modules/@docusaurus/utils/lib/emitUtils.js generated vendored Normal file

@@ -0,0 +1,85 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.readOutputHTMLFile = exports.generate = void 0;
const tslib_1 = require("tslib");
const path_1 = tslib_1.__importDefault(require("path"));
const fs_extra_1 = tslib_1.__importDefault(require("fs-extra"));
const crypto_1 = require("crypto");
const jsUtils_1 = require("./jsUtils");
const fileHash = new Map();
/**
* Outputs a file to the generated files directory. Only writes files if content
* differs from cache (for hot reload performance).
*
* @param generatedFilesDir Absolute path.
* @param file Path relative to `generatedFilesDir`. File will always be
* outputted; no need to ensure directory exists.
* @param content String content to write.
* @param skipCache If `true` (defaults to `true` in production), the file is
* force-rewritten, skipping the cache.
*/
async function generate(generatedFilesDir, file, content, skipCache = process.env.NODE_ENV === 'production') {
const filepath = path_1.default.resolve(generatedFilesDir, file);
if (skipCache) {
await fs_extra_1.default.outputFile(filepath, content);
// The cache still needs to be reset; otherwise, writing "A", "B", and "A" where
// "B" skips the cache would leave the last "A" unable to overwrite, since the
// first "A" remains in the cache. But if the file never existed in the cache,
// there is no need to register it.
if (fileHash.get(filepath)) {
fileHash.set(filepath, (0, crypto_1.createHash)('md5').update(content).digest('hex'));
}
return;
}
let lastHash = fileHash.get(filepath);
// If the file already exists but is not in the runtime cache yet, we
// calculate the content hash and compare. This avoids unnecessary overwriting
// so that we can reuse the old file.
if (!lastHash && (await fs_extra_1.default.pathExists(filepath))) {
const lastContent = await fs_extra_1.default.readFile(filepath, 'utf8');
lastHash = (0, crypto_1.createHash)('md5').update(lastContent).digest('hex');
fileHash.set(filepath, lastHash);
}
const currentHash = (0, crypto_1.createHash)('md5').update(content).digest('hex');
if (lastHash !== currentHash) {
await fs_extra_1.default.outputFile(filepath, content);
fileHash.set(filepath, currentHash);
}
}
exports.generate = generate;
/**
* @param permalink The URL that the HTML file corresponds to, without base URL
* @param outDir Full path to the output directory
* @param trailingSlash The site config option. If provided, only one path will
* be read.
* @returns A buffer, which you have to decode to a string yourself if needed.
* (Not always necessary, since the output isn't for human consumption anyway,
* and most HTML manipulation libs accept buffers.)
* @throws Throws when the HTML file is not found at any of the potential paths.
* This should never happen as it would lead to a 404.
*/
async function readOutputHTMLFile(permalink, outDir, trailingSlash) {
const withTrailingSlashPath = path_1.default.join(outDir, permalink, 'index.html');
const withoutTrailingSlashPath = (() => {
const basePath = path_1.default.join(outDir, permalink.replace(/\/$/, ''));
const htmlSuffix = /\.html?$/i.test(basePath) ? '' : '.html';
return `${basePath}${htmlSuffix}`;
})();
const possibleHtmlPaths = [
trailingSlash !== false && withTrailingSlashPath,
trailingSlash !== true && withoutTrailingSlashPath,
].filter((p) => Boolean(p));
const HTMLPath = await (0, jsUtils_1.findAsyncSequential)(possibleHtmlPaths, fs_extra_1.default.pathExists);
if (!HTMLPath) {
throw new Error(`Expected output HTML file to be found at ${withTrailingSlashPath} for permalink ${permalink}.`);
}
return fs_extra_1.default.readFile(HTMLPath);
}
exports.readOutputHTMLFile = readOutputHTMLFile;
//# sourceMappingURL=emitUtils.js.map
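A small sketch of the write-if-changed behavior of `generate` and of reading a built page back (directory, file name, and content are illustrative):

import {generate, readOutputHTMLFile} from '@docusaurus/utils';

async function emitRegistry(generatedFilesDir: string): Promise<void> {
  // The first call writes the file; the second is a no-op in dev because the
  // MD5 of the new content matches the cached hash (in production skipCache
  // defaults to true and the file is always rewritten).
  await generate(generatedFilesDir, 'registry.js', 'export default {};');
  await generate(generatedFilesDir, 'registry.js', 'export default {};');
}

async function readBuiltPage(outDir: string): Promise<Buffer> {
  // With trailingSlash undefined, both outDir/docs/intro/index.html and
  // outDir/docs/intro.html are candidate paths.
  return readOutputHTMLFile('/docs/intro', outDir, undefined);
}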

1
node_modules/@docusaurus/utils/lib/emitUtils.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"emitUtils.js","sourceRoot":"","sources":["../src/emitUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,wDAAwB;AACxB,gEAA0B;AAC1B,mCAAkC;AAClC,uCAA8C;AAE9C,MAAM,QAAQ,GAAG,IAAI,GAAG,EAAkB,CAAC;AAE3C;;;;;;;;;;GAUG;AACI,KAAK,UAAU,QAAQ,CAC5B,iBAAyB,EACzB,IAAY,EACZ,OAAe,EACf,YAAqB,OAAO,CAAC,GAAG,CAAC,QAAQ,KAAK,YAAY;IAE1D,MAAM,QAAQ,GAAG,cAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE,IAAI,CAAC,CAAC;IAEvD,IAAI,SAAS,EAAE;QACb,MAAM,kBAAE,CAAC,UAAU,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACvC,4EAA4E;QAC5E,0EAA0E;QAC1E,yEAAyE;QACzE,uBAAuB;QACvB,IAAI,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;YAC1B,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,IAAA,mBAAU,EAAC,KAAK,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;SACzE;QACD,OAAO;KACR;IAED,IAAI,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;IAEtC,sEAAsE;IACtE,4EAA4E;IAC5E,yCAAyC;IACzC,IAAI,CAAC,QAAQ,IAAI,CAAC,MAAM,kBAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE;QAChD,MAAM,WAAW,GAAG,MAAM,kBAAE,CAAC,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QACxD,QAAQ,GAAG,IAAA,mBAAU,EAAC,KAAK,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QAC/D,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;KAClC;IAED,MAAM,WAAW,GAAG,IAAA,mBAAU,EAAC,KAAK,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IAEpE,IAAI,QAAQ,KAAK,WAAW,EAAE;QAC5B,MAAM,kBAAE,CAAC,UAAU,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACvC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;KACrC;AACH,CAAC;AArCD,4BAqCC;AAED;;;;;;;;;;GAUG;AACI,KAAK,UAAU,kBAAkB,CACtC,SAAiB,EACjB,MAAc,EACd,aAAkC;IAElC,MAAM,qBAAqB,GAAG,cAAI,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,YAAY,CAAC,CAAC;IACzE,MAAM,wBAAwB,GAAG,CAAC,GAAG,EAAE;QACrC,MAAM,QAAQ,GAAG,cAAI,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC;QACjE,MAAM,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC;QAC7D,OAAO,GAAG,QAAQ,GAAG,UAAU,EAAE,CAAC;IACpC,CAAC,CAAC,EAAE,CAAC;IAEL,MAAM,iBAAiB,GAAG;QACxB,aAAa,KAAK,KAAK,IAAI,qBAAqB;QAChD,aAAa,KAAK,IAAI,IAAI,wBAAwB;KACnD,CAAC,MAAM,CAAC,CAAC,CAAC,EAAe,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;IAEzC,MAAM,QAAQ,GAAG,MAAM,IAAA,6BAAmB,EAAC,iBAAiB,EAAE,kBAAE,CAAC,UAAU,CAAC,CAAC;IAE7E,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,IAAI,KAAK,CACb,4CAA4C,qBAAqB,kBAAkB,SAAS,GAAG,CAChG,CAAC;KACH;IACD,OAAO,kBAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC/B,CAAC;AAzBD,gDAyBC"}

66
node_modules/@docusaurus/utils/lib/gitUtils.d.ts generated vendored Normal file

@@ -0,0 +1,66 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/** Custom error thrown when git is not found in `PATH`. */
export declare class GitNotFoundError extends Error {
}
/** Custom error thrown when the current file is not tracked by git. */
export declare class FileNotTrackedError extends Error {
}
/**
* Fetches the git history of a file and returns a relevant commit date.
* It gets the commit date instead of author date so that amended commits
* can have their dates updated.
*
* @throws {@link GitNotFoundError} If git is not found in `PATH`.
* @throws {@link FileNotTrackedError} If the current file is not tracked by git.
* @throws Also throws when `git log` exited with non-zero, or when it outputs
* unexpected text.
*/
export declare function getFileCommitDate(
/** Absolute path to the file. */
file: string, args: {
/**
* `"oldest"` is the commit that added the file, following renames;
* `"newest"` is the last commit that edited the file.
*/
age?: 'oldest' | 'newest';
/** Use `includeAuthor: true` to get the author information as well. */
includeAuthor?: false;
}): {
/** Relevant commit date. */
date: Date;
/** Timestamp in **seconds**, as returned from git. */
timestamp: number;
};
/**
* Fetches the git history of a file and returns a relevant commit date.
* It gets the commit date instead of author date so that amended commits
* can have their dates updated.
*
* @throws {@link GitNotFoundError} If git is not found in `PATH`.
* @throws {@link FileNotTrackedError} If the current file is not tracked by git.
* @throws Also throws when `git log` exited with non-zero, or when it outputs
* unexpected text.
*/
export declare function getFileCommitDate(
/** Absolute path to the file. */
file: string, args: {
/**
* `"oldest"` is the commit that added the file, following renames;
* `"newest"` is the last commit that edited the file.
*/
age?: 'oldest' | 'newest';
includeAuthor: true;
}): {
/** Relevant commit date. */
date: Date;
/** Timestamp in **seconds**, as returned from git. */
timestamp: number;
/** The author's name, as returned from git. */
author: string;
};
//# sourceMappingURL=gitUtils.d.ts.map

1
node_modules/@docusaurus/utils/lib/gitUtils.d.ts.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"gitUtils.d.ts","sourceRoot":"","sources":["../src/gitUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAKH,2DAA2D;AAC3D,qBAAa,gBAAiB,SAAQ,KAAK;CAAG;AAE9C,uEAAuE;AACvE,qBAAa,mBAAoB,SAAQ,KAAK;CAAG;AAEjD;;;;;;;;;GASG;AACH,wBAAgB,iBAAiB;AAC/B,iCAAiC;AACjC,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE;IACJ;;;OAGG;IACH,GAAG,CAAC,EAAE,QAAQ,GAAG,QAAQ,CAAC;IAC1B,uEAAuE;IACvE,aAAa,CAAC,EAAE,KAAK,CAAC;CACvB,GACA;IACD,4BAA4B;IAC5B,IAAI,EAAE,IAAI,CAAC;IACX,sDAAsD;IACtD,SAAS,EAAE,MAAM,CAAC;CACnB,CAAC;AACF;;;;;;;;;GASG;AACH,wBAAgB,iBAAiB;AAC/B,iCAAiC;AACjC,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE;IACJ;;;OAGG;IACH,GAAG,CAAC,EAAE,QAAQ,GAAG,QAAQ,CAAC;IAC1B,aAAa,EAAE,IAAI,CAAC;CACrB,GACA;IACD,4BAA4B;IAC5B,IAAI,EAAE,IAAI,CAAC;IACX,sDAAsD;IACtD,SAAS,EAAE,MAAM,CAAC;IAClB,+CAA+C;IAC/C,MAAM,EAAE,MAAM,CAAC;CAChB,CAAC"}

63
node_modules/@docusaurus/utils/lib/gitUtils.js generated vendored Normal file

@@ -0,0 +1,63 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.getFileCommitDate = exports.FileNotTrackedError = exports.GitNotFoundError = void 0;
const tslib_1 = require("tslib");
const path_1 = tslib_1.__importDefault(require("path"));
const shelljs_1 = tslib_1.__importDefault(require("shelljs"));
/** Custom error thrown when git is not found in `PATH`. */
class GitNotFoundError extends Error {
}
exports.GitNotFoundError = GitNotFoundError;
/** Custom error thrown when the current file is not tracked by git. */
class FileNotTrackedError extends Error {
}
exports.FileNotTrackedError = FileNotTrackedError;
function getFileCommitDate(file, { age = 'oldest', includeAuthor = false, }) {
if (!shelljs_1.default.which('git')) {
throw new GitNotFoundError(`Failed to retrieve git history for "${file}" because git is not installed.`);
}
if (!shelljs_1.default.test('-f', file)) {
throw new Error(`Failed to retrieve git history for "${file}" because the file does not exist.`);
}
const args = [
`--format=%ct${includeAuthor ? ',%an' : ''}`,
'--max-count=1',
age === 'oldest' ? '--follow --diff-filter=A' : undefined,
]
.filter(Boolean)
.join(' ');
const result = shelljs_1.default.exec(`git log ${args} -- "${path_1.default.basename(file)}"`, {
// Setting cwd is important, see: https://github.com/facebook/docusaurus/pull/5048
cwd: path_1.default.dirname(file),
silent: true,
});
if (result.code !== 0) {
throw new Error(`Failed to retrieve the git history for file "${file}" with exit code ${result.code}: ${result.stderr}`);
}
let regex = /^(?<timestamp>\d+)$/;
if (includeAuthor) {
regex = /^(?<timestamp>\d+),(?<author>.+)$/;
}
const output = result.stdout.trim();
if (!output) {
throw new FileNotTrackedError(`Failed to retrieve the git history for file "${file}" because the file is not tracked by git.`);
}
const match = output.match(regex);
if (!match) {
throw new Error(`Failed to retrieve the git history for file "${file}" with unexpected output: ${output}`);
}
const timestamp = Number(match.groups.timestamp);
const date = new Date(timestamp * 1000);
if (includeAuthor) {
return { date, timestamp, author: match.groups.author };
}
return { date, timestamp };
}
exports.getFileCommitDate = getFileCommitDate;
//# sourceMappingURL=gitUtils.js.map
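A usage sketch for `getFileCommitDate` (the file path is illustrative; the error classes are the ones declared in this module):

import {FileNotTrackedError, getFileCommitDate} from '@docusaurus/utils';

try {
  // "newest" targets the last commit that touched the file; the commit date
  // (not the author date) is used, so amended commits report the updated date.
  const {date, timestamp, author} = getFileCommitDate('/site/docs/intro.md', {
    age: 'newest',
    includeAuthor: true,
  });
  console.log(date.toISOString(), timestamp, author);
} catch (err) {
  if (err instanceof FileNotTrackedError) {
    console.warn('File is not tracked by git; fall back to filesystem mtime.');
  } else {
    throw err;
  }
}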

1
node_modules/@docusaurus/utils/lib/gitUtils.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"gitUtils.js","sourceRoot":"","sources":["../src/gitUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,wDAAwB;AACxB,8DAA4B;AAE5B,2DAA2D;AAC3D,MAAa,gBAAiB,SAAQ,KAAK;CAAG;AAA9C,4CAA8C;AAE9C,uEAAuE;AACvE,MAAa,mBAAoB,SAAQ,KAAK;CAAG;AAAjD,kDAAiD;AA2DjD,SAAgB,iBAAiB,CAC/B,IAAY,EACZ,EACE,GAAG,GAAG,QAAQ,EACd,aAAa,GAAG,KAAK,GAItB;IAMD,IAAI,CAAC,iBAAK,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE;QACvB,MAAM,IAAI,gBAAgB,CACxB,uCAAuC,IAAI,iCAAiC,CAC7E,CAAC;KACH;IAED,IAAI,CAAC,iBAAK,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,EAAE;QAC3B,MAAM,IAAI,KAAK,CACb,uCAAuC,IAAI,oCAAoC,CAChF,CAAC;KACH;IAED,MAAM,IAAI,GAAG;QACX,eAAe,aAAa,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC5C,eAAe;QACf,GAAG,KAAK,QAAQ,CAAC,CAAC,CAAC,0BAA0B,CAAC,CAAC,CAAC,SAAS;KAC1D;SACE,MAAM,CAAC,OAAO,CAAC;SACf,IAAI,CAAC,GAAG,CAAC,CAAC;IAEb,MAAM,MAAM,GAAG,iBAAK,CAAC,IAAI,CAAC,WAAW,IAAI,QAAQ,cAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE;QACvE,kFAAkF;QAClF,GAAG,EAAE,cAAI,CAAC,OAAO,CAAC,IAAI,CAAC;QACvB,MAAM,EAAE,IAAI;KACb,CAAC,CAAC;IACH,IAAI,MAAM,CAAC,IAAI,KAAK,CAAC,EAAE;QACrB,MAAM,IAAI,KAAK,CACb,gDAAgD,IAAI,oBAAoB,MAAM,CAAC,IAAI,KAAK,MAAM,CAAC,MAAM,EAAE,CACxG,CAAC;KACH;IACD,IAAI,KAAK,GAAG,qBAAqB,CAAC;IAClC,IAAI,aAAa,EAAE;QACjB,KAAK,GAAG,mCAAmC,CAAC;KAC7C;IAED,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC;IAEpC,IAAI,CAAC,MAAM,EAAE;QACX,MAAM,IAAI,mBAAmB,CAC3B,gDAAgD,IAAI,2CAA2C,CAChG,CAAC;KACH;IAED,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAElC,IAAI,CAAC,KAAK,EAAE;QACV,MAAM,IAAI,KAAK,CACb,gDAAgD,IAAI,6BAA6B,MAAM,EAAE,CAC1F,CAAC;KACH;IAED,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC,MAAO,CAAC,SAAS,CAAC,CAAC;IAClD,MAAM,IAAI,GAAG,IAAI,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,CAAC;IAExC,IAAI,aAAa,EAAE;QACjB,OAAO,EAAC,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,KAAK,CAAC,MAAO,CAAC,MAAO,EAAC,CAAC;KACzD;IACD,OAAO,EAAC,IAAI,EAAE,SAAS,EAAC,CAAC;AAC3B,CAAC;AAxED,8CAwEC"}

40
node_modules/@docusaurus/utils/lib/globUtils.d.ts generated vendored Normal file

@@ -0,0 +1,40 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/** A re-export of the globby instance. */
export { default as Globby } from 'globby';
/**
* The default glob patterns we ignore when sourcing content.
* - Ignore files and folders starting with `_` recursively
* - Ignore tests
*/
export declare const GlobExcludeDefault: string[];
type Matcher = (str: string) => boolean;
/**
* A very thin wrapper around `Micromatch.makeRe`.
*
* @see {@link createAbsoluteFilePathMatcher}
* @param patterns A list of glob patterns. If the list is empty, it defaults to
* matching none.
* @returns A matcher handle that tells if a file path is matched by any of the
* patterns.
*/
export declare function createMatcher(patterns: string[]): Matcher;
/**
* We use match patterns like `"** /_* /**"` (ignore the spaces), where `"_*"`
* should only be matched within a subfolder. This function would:
* - Match `/user/sebastien/website/docs/_partials/xyz.md`
* - Ignore `/user/_sebastien/website/docs/partials/xyz.md`
*
* @param patterns A list of glob patterns.
* @param rootFolders A list of root folders to resolve the glob from.
* @returns A matcher handle that tells if a file path is matched by any of the
* patterns, resolved from the first root folder that contains the path.
* @throws Throws when the returned matcher receives a path that doesn't belong
* to any of the `rootFolders`.
*/
export declare function createAbsoluteFilePathMatcher(patterns: string[], rootFolders: string[]): Matcher;
//# sourceMappingURL=globUtils.d.ts.map

1
node_modules/@docusaurus/utils/lib/globUtils.d.ts.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"globUtils.d.ts","sourceRoot":"","sources":["../src/globUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAQH,0CAA0C;AAC1C,OAAO,EAAC,OAAO,IAAI,MAAM,EAAC,MAAM,QAAQ,CAAC;AAEzC;;;;GAIG;AACH,eAAO,MAAM,kBAAkB,UAK9B,CAAC;AAEF,KAAK,OAAO,GAAG,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC;AAExC;;;;;;;;GAQG;AACH,wBAAgB,aAAa,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,OAAO,CASzD;AAED;;;;;;;;;;;;GAYG;AACH,wBAAgB,6BAA6B,CAC3C,QAAQ,EAAE,MAAM,EAAE,EAClB,WAAW,EAAE,MAAM,EAAE,GACpB,OAAO,CAqBT"}

72
node_modules/@docusaurus/utils/lib/globUtils.js generated vendored Normal file

@@ -0,0 +1,72 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.createAbsoluteFilePathMatcher = exports.createMatcher = exports.GlobExcludeDefault = exports.Globby = void 0;
const tslib_1 = require("tslib");
// Globby/Micromatch are the 2 libs we use in Docusaurus consistently
const path_1 = tslib_1.__importDefault(require("path"));
const micromatch_1 = tslib_1.__importDefault(require("micromatch")); // Note: Micromatch is used by Globby
const jsUtils_1 = require("./jsUtils");
/** A re-export of the globby instance. */
var globby_1 = require("globby");
Object.defineProperty(exports, "Globby", { enumerable: true, get: function () { return tslib_1.__importDefault(globby_1).default; } });
/**
* The default glob patterns we ignore when sourcing content.
* - Ignore files and folders starting with `_` recursively
* - Ignore tests
*/
exports.GlobExcludeDefault = [
'**/_*.{js,jsx,ts,tsx,md,mdx}',
'**/_*/**',
'**/*.test.{js,jsx,ts,tsx}',
'**/__tests__/**',
];
/**
* A very thin wrapper around `Micromatch.makeRe`.
*
* @see {@link createAbsoluteFilePathMatcher}
* @param patterns A list of glob patterns. If the list is empty, it defaults to
* matching none.
* @returns A matcher handle that tells if a file path is matched by any of the
* patterns.
*/
function createMatcher(patterns) {
if (patterns.length === 0) {
// `/(?:)/.test("foo")` is `true`
return () => false;
}
const regexp = new RegExp(patterns.map((pattern) => micromatch_1.default.makeRe(pattern).source).join('|'));
return (str) => regexp.test(str);
}
exports.createMatcher = createMatcher;
/**
* We use match patterns like `"** /_* /**"` (ignore the spaces), where `"_*"`
* should only be matched within a subfolder. This function would:
* - Match `/user/sebastien/website/docs/_partials/xyz.md`
* - Ignore `/user/_sebastien/website/docs/partials/xyz.md`
*
* @param patterns A list of glob patterns.
* @param rootFolders A list of root folders to resolve the glob from.
* @returns A matcher handle that tells if a file path is matched by any of the
* patterns, resolved from the first root folder that contains the path.
* @throws Throws when the returned matcher receives a path that doesn't belong
* to any of the `rootFolders`.
*/
function createAbsoluteFilePathMatcher(patterns, rootFolders) {
const matcher = createMatcher(patterns);
function getRelativeFilePath(absoluteFilePath) {
const rootFolder = rootFolders.find((folderPath) => [(0, jsUtils_1.addSuffix)(folderPath, '/'), (0, jsUtils_1.addSuffix)(folderPath, '\\')].some((p) => absoluteFilePath.startsWith(p)));
if (!rootFolder) {
throw new Error(`createAbsoluteFilePathMatcher unexpected error, absoluteFilePath=${absoluteFilePath} was not contained in any of the root folders: ${rootFolders.join(', ')}`);
}
return path_1.default.relative(rootFolder, absoluteFilePath);
}
return (absoluteFilePath) => matcher(getRelativeFilePath(absoluteFilePath));
}
exports.createAbsoluteFilePathMatcher = createAbsoluteFilePathMatcher;
//# sourceMappingURL=globUtils.js.map
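A sketch of the matchers above applied to the default exclude patterns (all paths are illustrative):

import {
  GlobExcludeDefault,
  createAbsoluteFilePathMatcher,
  createMatcher,
} from '@docusaurus/utils';

const isExcluded = createMatcher(GlobExcludeDefault);
console.log(isExcluded('guides/_partial.md')); // true: "_"-prefixed file
console.log(isExcluded('guides/intro.md')); // false

// The absolute variant first strips whichever root folder contains the path,
// then applies the same relative patterns; it throws if the path is outside
// every root folder.
const isExcludedAbs = createAbsoluteFilePathMatcher(GlobExcludeDefault, [
  '/site/docs',
]);
console.log(isExcludedAbs('/site/docs/_partials/note.md')); // true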

1
node_modules/@docusaurus/utils/lib/globUtils.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"globUtils.js","sourceRoot":"","sources":["../src/globUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,qEAAqE;AAErE,wDAAwB;AACxB,oEAAoC,CAAC,qCAAqC;AAC1E,uCAAoC;AAEpC,0CAA0C;AAC1C,iCAAyC;AAAjC,yHAAA,OAAO,OAAU;AAEzB;;;;GAIG;AACU,QAAA,kBAAkB,GAAG;IAChC,8BAA8B;IAC9B,UAAU;IACV,2BAA2B;IAC3B,iBAAiB;CAClB,CAAC;AAIF;;;;;;;;GAQG;AACH,SAAgB,aAAa,CAAC,QAAkB;IAC9C,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE;QACzB,iCAAiC;QACjC,OAAO,GAAG,EAAE,CAAC,KAAK,CAAC;KACpB;IACD,MAAM,MAAM,GAAG,IAAI,MAAM,CACvB,QAAQ,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,oBAAU,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CACvE,CAAC;IACF,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACnC,CAAC;AATD,sCASC;AAED;;;;;;;;;;;;GAYG;AACH,SAAgB,6BAA6B,CAC3C,QAAkB,EAClB,WAAqB;IAErB,MAAM,OAAO,GAAG,aAAa,CAAC,QAAQ,CAAC,CAAC;IAExC,SAAS,mBAAmB,CAAC,gBAAwB;QACnD,MAAM,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,CAAC,UAAU,EAAE,EAAE,CACjD,CAAC,IAAA,mBAAS,EAAC,UAAU,EAAE,GAAG,CAAC,EAAE,IAAA,mBAAS,EAAC,UAAU,EAAE,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CACnE,gBAAgB,CAAC,UAAU,CAAC,CAAC,CAAC,CAC/B,CACF,CAAC;QACF,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CACb,oEAAoE,gBAAgB,kDAAkD,WAAW,CAAC,IAAI,CACpJ,IAAI,CACL,EAAE,CACJ,CAAC;SACH;QACD,OAAO,cAAI,CAAC,QAAQ,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;IACrD,CAAC;IAED,OAAO,CAAC,gBAAwB,EAAE,EAAE,CAClC,OAAO,CAAC,mBAAmB,CAAC,gBAAgB,CAAC,CAAC,CAAC;AACnD,CAAC;AAxBD,sEAwBC"}

17
node_modules/@docusaurus/utils/lib/hashUtils.d.ts generated vendored Normal file

@@ -0,0 +1,17 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/** Thin wrapper around `crypto.createHash("md5")`. */
export declare function md5Hash(str: string): string;
/** Creates an MD5 hash and truncates it to the given length. */
export declare function simpleHash(str: string, length: number): string;
/**
* Given an input string, converts it to kebab-case and appends a hash, avoiding
* name collisions. Also removes part of the string if it's larger than the
* allowed filename length per OS, avoiding the `ERRNAMETOOLONG` error.
*/
export declare function docuHash(str: string): string;
//# sourceMappingURL=hashUtils.d.ts.map

1
node_modules/@docusaurus/utils/lib/hashUtils.d.ts.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"hashUtils.d.ts","sourceRoot":"","sources":["../src/hashUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAMH,sDAAsD;AACtD,wBAAgB,OAAO,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAE3C;AAED,gEAAgE;AAChE,wBAAgB,UAAU,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,MAAM,CAE9D;AAGD;;;;GAIG;AACH,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAU5C"}

42
node_modules/@docusaurus/utils/lib/hashUtils.js generated vendored Normal file

@@ -0,0 +1,42 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.docuHash = exports.simpleHash = exports.md5Hash = void 0;
const tslib_1 = require("tslib");
const crypto_1 = require("crypto");
const lodash_1 = tslib_1.__importDefault(require("lodash"));
const pathUtils_1 = require("./pathUtils");
/** Thin wrapper around `crypto.createHash("md5")`. */
function md5Hash(str) {
return (0, crypto_1.createHash)('md5').update(str).digest('hex');
}
exports.md5Hash = md5Hash;
/** Creates an MD5 hash and truncates it to the given length. */
function simpleHash(str, length) {
return md5Hash(str).substring(0, length);
}
exports.simpleHash = simpleHash;
// Based on https://github.com/gatsbyjs/gatsby/pull/21518/files
/**
* Given an input string, converts it to kebab-case and appends a hash, avoiding
* name collisions. Also removes part of the string if it's larger than the
* allowed filename length per OS, avoiding the `ERRNAMETOOLONG` error.
*/
function docuHash(str) {
if (str === '/') {
return 'index';
}
const shortHash = simpleHash(str, 3);
const parsedPath = `${lodash_1.default.kebabCase(str)}-${shortHash}`;
if ((0, pathUtils_1.isNameTooLong)(parsedPath)) {
return `${(0, pathUtils_1.shortName)(lodash_1.default.kebabCase(str))}-${shortHash}`;
}
return parsedPath;
}
exports.docuHash = docuHash;
//# sourceMappingURL=hashUtils.js.map
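A quick sketch of the hashing helpers above (the digest values in the comments are placeholders, not real output):

import {docuHash, md5Hash, simpleHash} from '@docusaurus/utils';

console.log(md5Hash('/docs/intro')); // full 32-char hex MD5 digest
console.log(simpleHash('/docs/intro', 3)); // first 3 chars of that digest

// docuHash kebab-cases the input and appends a 3-char hash ("/" maps to
// "index"), shortening over-long names to stay under OS filename limits.
console.log(docuHash('/docs/intro')); // e.g. "docs-intro-xxx"
console.log(docuHash('/')); // "index"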

1
node_modules/@docusaurus/utils/lib/hashUtils.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"hashUtils.js","sourceRoot":"","sources":["../src/hashUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,mCAAkC;AAClC,4DAAuB;AACvB,2CAAqD;AAErD,sDAAsD;AACtD,SAAgB,OAAO,CAAC,GAAW;IACjC,OAAO,IAAA,mBAAU,EAAC,KAAK,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AACrD,CAAC;AAFD,0BAEC;AAED,gEAAgE;AAChE,SAAgB,UAAU,CAAC,GAAW,EAAE,MAAc;IACpD,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC;AAC3C,CAAC;AAFD,gCAEC;AAED,+DAA+D;AAC/D;;;;GAIG;AACH,SAAgB,QAAQ,CAAC,GAAW;IAClC,IAAI,GAAG,KAAK,GAAG,EAAE;QACf,OAAO,OAAO,CAAC;KAChB;IACD,MAAM,SAAS,GAAG,UAAU,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC;IACrC,MAAM,UAAU,GAAG,GAAG,gBAAC,CAAC,SAAS,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE,CAAC;IACtD,IAAI,IAAA,yBAAa,EAAC,UAAU,CAAC,EAAE;QAC7B,OAAO,GAAG,IAAA,qBAAS,EAAC,gBAAC,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,SAAS,EAAE,CAAC;KACtD;IACD,OAAO,UAAU,CAAC;AACpB,CAAC;AAVD,4BAUC"}

53
node_modules/@docusaurus/utils/lib/i18nUtils.d.ts generated vendored Normal file

@@ -0,0 +1,53 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import type { TranslationFileContent, TranslationFile, I18n } from '@docusaurus/types';
/**
* Takes a list of translation file contents, and shallow-merges them into one.
*/
export declare function mergeTranslations(contents: TranslationFileContent[]): TranslationFileContent;
/**
* Useful to update all the messages of a translation file. Used in tests to
* simulate translations.
*/
export declare function updateTranslationFileMessages(translationFile: TranslationFile, updateMessage: (message: string) => string): TranslationFile;
/**
* Takes everything needed and constructs a plugin i18n path. Plugins should
* expect everything they need for translations to be found under this path.
*/
export declare function getPluginI18nPath({ localizationDir, pluginName, pluginId, subPaths, }: {
localizationDir: string;
pluginName: string;
pluginId?: string | undefined;
subPaths?: string[];
}): string;
/**
* Takes a path and returns a localized version (which is basically `path +
* i18n.currentLocale`).
*
* This is used to resolve the `outDir` and `baseUrl` of each locale; it is NOT
* used to determine plugin localization file locations.
*/
export declare function localizePath({ pathType, path: originalPath, i18n, options, }: {
/**
* FS paths will treat Windows specially; URL paths will always have a
* trailing slash to make it a valid base URL.
*/
pathType: 'fs' | 'url';
/** The path, URL or file path, to be localized. */
path: string;
/** The current i18n context. */
i18n: I18n;
options?: {
/**
* By default, we don't localize the path of defaultLocale. This option
* would override that behavior. Setting `false` is useful for `yarn build
* -l zh-Hans` to always emit into the root build directory.
*/
localizePath?: boolean;
};
}): string;
//# sourceMappingURL=i18nUtils.d.ts.map

1
node_modules/@docusaurus/utils/lib/i18nUtils.d.ts.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"i18nUtils.d.ts","sourceRoot":"","sources":["../src/i18nUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAMH,OAAO,KAAK,EACV,sBAAsB,EACtB,eAAe,EACf,IAAI,EACL,MAAM,mBAAmB,CAAC;AAE3B;;GAEG;AACH,wBAAgB,iBAAiB,CAC/B,QAAQ,EAAE,sBAAsB,EAAE,GACjC,sBAAsB,CAExB;AAED;;;GAGG;AACH,wBAAgB,6BAA6B,CAC3C,eAAe,EAAE,eAAe,EAChC,aAAa,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,MAAM,GACzC,eAAe,CAQjB;AAED;;;GAGG;AACH,wBAAgB,iBAAiB,CAAC,EAChC,eAAe,EACf,UAAU,EACV,QAA4B,EAC5B,QAAa,GACd,EAAE;IACD,eAAe,EAAE,MAAM,CAAC;IACxB,UAAU,EAAE,MAAM,CAAC;IACnB,QAAQ,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC9B,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;CACrB,GAAG,MAAM,CAQT;AAED;;;;;;GAMG;AACH,wBAAgB,YAAY,CAAC,EAC3B,QAAQ,EACR,IAAI,EAAE,YAAY,EAClB,IAAI,EACJ,OAAY,GACb,EAAE;IACD;;;OAGG;IACH,QAAQ,EAAE,IAAI,GAAG,KAAK,CAAC;IACvB,mDAAmD;IACnD,IAAI,EAAE,MAAM,CAAC;IACb,gCAAgC;IAChC,IAAI,EAAE,IAAI,CAAC;IACX,OAAO,CAAC,EAAE;QACR;;;;WAIG;QACH,YAAY,CAAC,EAAE,OAAO,CAAC;KACxB,CAAC;CACH,GAAG,MAAM,CAgBT"}

70
node_modules/@docusaurus/utils/lib/i18nUtils.js generated vendored Normal file

@@ -0,0 +1,70 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.localizePath = exports.getPluginI18nPath = exports.updateTranslationFileMessages = exports.mergeTranslations = void 0;
const tslib_1 = require("tslib");
const path_1 = tslib_1.__importDefault(require("path"));
const lodash_1 = tslib_1.__importDefault(require("lodash"));
const constants_1 = require("./constants");
const urlUtils_1 = require("./urlUtils");
/**
* Takes a list of translation file contents, and shallow-merges them into one.
*/
function mergeTranslations(contents) {
return contents.reduce((acc, content) => ({ ...acc, ...content }), {});
}
exports.mergeTranslations = mergeTranslations;
/**
* Useful to update all the messages of a translation file. Used in tests to
* simulate translations.
*/
function updateTranslationFileMessages(translationFile, updateMessage) {
return {
...translationFile,
content: lodash_1.default.mapValues(translationFile.content, (translation) => ({
...translation,
message: updateMessage(translation.message),
})),
};
}
exports.updateTranslationFileMessages = updateTranslationFileMessages;
/**
* Takes everything needed and constructs a plugin i18n path. Plugins should
* expect everything they need for translations to be found under this path.
*/
function getPluginI18nPath({ localizationDir, pluginName, pluginId = constants_1.DEFAULT_PLUGIN_ID, subPaths = [], }) {
return path_1.default.join(localizationDir,
// Make it convenient to use for single-instance
// i.e. return "docs", not "docs-default" nor "docs/default"
`${pluginName}${pluginId === constants_1.DEFAULT_PLUGIN_ID ? '' : `-${pluginId}`}`, ...subPaths);
}
exports.getPluginI18nPath = getPluginI18nPath;
/**
* Takes a path and returns a localized version (which is basically `path +
* i18n.currentLocale`).
*
* This is used to resolve the `outDir` and `baseUrl` of each locale; it is NOT
* used to determine plugin localization file locations.
*/
function localizePath({ pathType, path: originalPath, i18n, options = {}, }) {
const shouldLocalizePath = options.localizePath ?? i18n.currentLocale !== i18n.defaultLocale;
if (!shouldLocalizePath) {
return originalPath;
}
// FS paths need special care, for Windows support. Note: we don't use the
// locale config's `path` here, because this function is used for resolving
// outDir, which must be the same as baseUrl. When we have the baseUrl config,
// we need to sync the two.
if (pathType === 'fs') {
return path_1.default.join(originalPath, i18n.currentLocale);
}
// Url paths; add a trailing slash so it's a valid base URL
return (0, urlUtils_1.normalizeUrl)([originalPath, i18n.currentLocale, '/']);
}
exports.localizePath = localizePath;
//# sourceMappingURL=i18nUtils.js.map
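A sketch of localizing an output directory for a non-default locale (the i18n object is trimmed to the two fields `localizePath` reads; a real one comes from the site context):

import type {I18n} from '@docusaurus/types';
import {localizePath} from '@docusaurus/utils';

const i18n = {defaultLocale: 'en', currentLocale: 'fr'} as I18n;

// The default locale leaves the path unchanged; "fr" gets its own segment.
console.log(localizePath({pathType: 'fs', path: '/site/build', i18n})); // /site/build/fr on POSIX
// The URL flavor always ends with a trailing slash to stay a valid base URL.
console.log(localizePath({pathType: 'url', path: '/', i18n})); // "/fr/"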

1
node_modules/@docusaurus/utils/lib/i18nUtils.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"i18nUtils.js","sourceRoot":"","sources":["../src/i18nUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,wDAAwB;AACxB,4DAAuB;AACvB,2CAA8C;AAC9C,yCAAwC;AAOxC;;GAEG;AACH,SAAgB,iBAAiB,CAC/B,QAAkC;IAElC,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,OAAO,EAAE,EAAE,CAAC,CAAC,EAAC,GAAG,GAAG,EAAE,GAAG,OAAO,EAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AACvE,CAAC;AAJD,8CAIC;AAED;;;GAGG;AACH,SAAgB,6BAA6B,CAC3C,eAAgC,EAChC,aAA0C;IAE1C,OAAO;QACL,GAAG,eAAe;QAClB,OAAO,EAAE,gBAAC,CAAC,SAAS,CAAC,eAAe,CAAC,OAAO,EAAE,CAAC,WAAW,EAAE,EAAE,CAAC,CAAC;YAC9D,GAAG,WAAW;YACd,OAAO,EAAE,aAAa,CAAC,WAAW,CAAC,OAAO,CAAC;SAC5C,CAAC,CAAC;KACJ,CAAC;AACJ,CAAC;AAXD,sEAWC;AAED;;;GAGG;AACH,SAAgB,iBAAiB,CAAC,EAChC,eAAe,EACf,UAAU,EACV,QAAQ,GAAG,6BAAiB,EAC5B,QAAQ,GAAG,EAAE,GAMd;IACC,OAAO,cAAI,CAAC,IAAI,CACd,eAAe;IACf,gDAAgD;IAChD,2DAA2D;IAC3D,GAAG,UAAU,GAAG,QAAQ,KAAK,6BAAiB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,QAAQ,EAAE,EAAE,EACtE,GAAG,QAAQ,CACZ,CAAC;AACJ,CAAC;AAlBD,8CAkBC;AAED;;;;;;GAMG;AACH,SAAgB,YAAY,CAAC,EAC3B,QAAQ,EACR,IAAI,EAAE,YAAY,EAClB,IAAI,EACJ,OAAO,GAAG,EAAE,GAmBb;IACC,MAAM,kBAAkB,GACtB,OAAO,CAAC,YAAY,IAAI,IAAI,CAAC,aAAa,KAAK,IAAI,CAAC,aAAa,CAAC;IAEpE,IAAI,CAAC,kBAAkB,EAAE;QACvB,OAAO,YAAY,CAAC;KACrB;IACD,0EAA0E;IAC1E,2EAA2E;IAC3E,8EAA8E;IAC9E,2BAA2B;IAC3B,IAAI,QAAQ,KAAK,IAAI,EAAE;QACrB,OAAO,cAAI,CAAC,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;KACpD;IACD,2DAA2D;IAC3D,OAAO,IAAA,uBAAY,EAAC,CAAC,YAAY,EAAE,IAAI,CAAC,aAAa,EAAE,GAAG,CAAC,CAAC,CAAC;AAC/D,CAAC;AAvCD,oCAuCC"}

27
node_modules/@docusaurus/utils/lib/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,27 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
export { NODE_MAJOR_VERSION, NODE_MINOR_VERSION, DOCUSAURUS_VERSION, DEFAULT_BUILD_DIR_NAME, DEFAULT_CONFIG_FILE_NAME, BABEL_CONFIG_FILE_NAME, GENERATED_FILES_DIR_NAME, SRC_DIR_NAME, DEFAULT_STATIC_DIR_NAME, OUTPUT_STATIC_ASSETS_DIR_NAME, THEME_PATH, DEFAULT_I18N_DIR_NAME, CODE_TRANSLATIONS_FILE_NAME, DEFAULT_PORT, DEFAULT_PLUGIN_ID, WEBPACK_URL_LOADER_LIMIT, } from './constants';
export { generate, readOutputHTMLFile } from './emitUtils';
export { getFileCommitDate, FileNotTrackedError, GitNotFoundError, } from './gitUtils';
export { mergeTranslations, updateTranslationFileMessages, getPluginI18nPath, localizePath, } from './i18nUtils';
export { removeSuffix, removePrefix, mapAsyncSequential, findAsyncSequential, } from './jsUtils';
export { normalizeUrl, getEditUrl, fileToPath, encodePath, isValidPathname, resolvePathname, parseURLPath, serializeURLPath, addLeadingSlash, addTrailingSlash, removeTrailingSlash, hasSSHProtocol, buildHttpsUrl, buildSshUrl, } from './urlUtils';
export type { URLPath } from './urlUtils';
export { type Tag, type TagsListItem, type TagModule, type FrontMatterTag, normalizeFrontMatterTags, groupTaggedItems, getTagVisibility, } from './tags';
export { parseMarkdownHeadingId, escapeMarkdownHeadingIds, unwrapMdxCodeBlocks, admonitionTitleToDirectiveLabel, createExcerpt, DEFAULT_PARSE_FRONT_MATTER, parseMarkdownContentTitle, parseMarkdownFile, writeMarkdownHeadingId, type WriteHeadingIDOptions, } from './markdownUtils';
export { type ContentPaths, type BrokenMarkdownLink, replaceMarkdownLinks, } from './markdownLinks';
export { type SluggerOptions, type Slugger, createSlugger } from './slugger';
export { isNameTooLong, shortName, posixPath, toMessageRelativeFilePath, aliasedSitePath, escapePath, addTrailingPathSeparator, } from './pathUtils';
export { md5Hash, simpleHash, docuHash } from './hashUtils';
export { Globby, GlobExcludeDefault, createMatcher, createAbsoluteFilePathMatcher, } from './globUtils';
export { getFileLoaderUtils, getWebpackLoaderCompilerName, type WebpackCompilerName, } from './webpackUtils';
export { escapeShellArg } from './shellUtils';
export { loadFreshModule } from './moduleUtils';
export { getDataFilePath, getDataFileData, getContentPathList, findFolderContainingFile, getFolderContainingFile, } from './dataFileUtils';
export { isDraft, isUnlisted } from './contentVisibilityUtils';
export { escapeRegexp } from './regExpUtils';
//# sourceMappingURL=index.d.ts.map

1
node_modules/@docusaurus/utils/lib/index.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EACL,kBAAkB,EAClB,kBAAkB,EAClB,kBAAkB,EAClB,sBAAsB,EACtB,wBAAwB,EACxB,sBAAsB,EACtB,wBAAwB,EACxB,YAAY,EACZ,uBAAuB,EACvB,6BAA6B,EAC7B,UAAU,EACV,qBAAqB,EACrB,2BAA2B,EAC3B,YAAY,EACZ,iBAAiB,EACjB,wBAAwB,GACzB,MAAM,aAAa,CAAC;AACrB,OAAO,EAAC,QAAQ,EAAE,kBAAkB,EAAC,MAAM,aAAa,CAAC;AACzD,OAAO,EACL,iBAAiB,EACjB,mBAAmB,EACnB,gBAAgB,GACjB,MAAM,YAAY,CAAC;AACpB,OAAO,EACL,iBAAiB,EACjB,6BAA6B,EAC7B,iBAAiB,EACjB,YAAY,GACb,MAAM,aAAa,CAAC;AACrB,OAAO,EACL,YAAY,EACZ,YAAY,EACZ,kBAAkB,EAClB,mBAAmB,GACpB,MAAM,WAAW,CAAC;AACnB,OAAO,EACL,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,eAAe,EACf,eAAe,EACf,YAAY,EACZ,gBAAgB,EAChB,eAAe,EACf,gBAAgB,EAChB,mBAAmB,EACnB,cAAc,EACd,aAAa,EACb,WAAW,GACZ,MAAM,YAAY,CAAC;AACpB,YAAY,EAAC,OAAO,EAAC,MAAM,YAAY,CAAC;AACxC,OAAO,EACL,KAAK,GAAG,EACR,KAAK,YAAY,EACjB,KAAK,SAAS,EACd,KAAK,cAAc,EACnB,wBAAwB,EACxB,gBAAgB,EAChB,gBAAgB,GACjB,MAAM,QAAQ,CAAC;AAChB,OAAO,EACL,sBAAsB,EACtB,wBAAwB,EACxB,mBAAmB,EACnB,+BAA+B,EAC/B,aAAa,EACb,0BAA0B,EAC1B,yBAAyB,EACzB,iBAAiB,EACjB,sBAAsB,EACtB,KAAK,qBAAqB,GAC3B,MAAM,iBAAiB,CAAC;AACzB,OAAO,EACL,KAAK,YAAY,EACjB,KAAK,kBAAkB,EACvB,oBAAoB,GACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAC,KAAK,cAAc,EAAE,KAAK,OAAO,EAAE,aAAa,EAAC,MAAM,WAAW,CAAC;AAC3E,OAAO,EACL,aAAa,EACb,SAAS,EACT,SAAS,EACT,yBAAyB,EACzB,eAAe,EACf,UAAU,EACV,wBAAwB,GACzB,MAAM,aAAa,CAAC;AACrB,OAAO,EAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAC,MAAM,aAAa,CAAC;AAC1D,OAAO,EACL,MAAM,EACN,kBAAkB,EAClB,aAAa,EACb,6BAA6B,GAC9B,MAAM,aAAa,CAAC;AACrB,OAAO,EACL,kBAAkB,EAClB,4BAA4B,EAC5B,KAAK,mBAAmB,GACzB,MAAM,gBAAgB,CAAC;AACxB,OAAO,EAAC,cAAc,EAAC,MAAM,cAAc,CAAC;AAC5C,OAAO,EAAC,eAAe,EAAC,MAAM,eAAe,CAAC;AAC9C,OAAO,EACL,eAAe,EACf,eAAe,EACf,kBAAkB,EAClB,wBAAwB,EACxB,uBAAuB,GACxB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAC,OAAO,EAAE,UAAU,EAAC,MAAM,0BAA0B,CAAC;AAC7D,OAAO,EAAC,YAAY,EAAC,MAAM,eAAe,CAAC"}

113
node_modules/@docusaurus/utils/lib/index.js generated vendored Normal file
View File

@@ -0,0 +1,113 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.admonitionTitleToDirectiveLabel = exports.unwrapMdxCodeBlocks = exports.escapeMarkdownHeadingIds = exports.parseMarkdownHeadingId = exports.getTagVisibility = exports.groupTaggedItems = exports.normalizeFrontMatterTags = exports.buildSshUrl = exports.buildHttpsUrl = exports.hasSSHProtocol = exports.removeTrailingSlash = exports.addTrailingSlash = exports.addLeadingSlash = exports.serializeURLPath = exports.parseURLPath = exports.resolvePathname = exports.isValidPathname = exports.encodePath = exports.fileToPath = exports.getEditUrl = exports.normalizeUrl = exports.findAsyncSequential = exports.mapAsyncSequential = exports.removePrefix = exports.removeSuffix = exports.localizePath = exports.getPluginI18nPath = exports.updateTranslationFileMessages = exports.mergeTranslations = exports.GitNotFoundError = exports.FileNotTrackedError = exports.getFileCommitDate = exports.readOutputHTMLFile = exports.generate = exports.WEBPACK_URL_LOADER_LIMIT = exports.DEFAULT_PLUGIN_ID = exports.DEFAULT_PORT = exports.CODE_TRANSLATIONS_FILE_NAME = exports.DEFAULT_I18N_DIR_NAME = exports.THEME_PATH = exports.OUTPUT_STATIC_ASSETS_DIR_NAME = exports.DEFAULT_STATIC_DIR_NAME = exports.SRC_DIR_NAME = exports.GENERATED_FILES_DIR_NAME = exports.BABEL_CONFIG_FILE_NAME = exports.DEFAULT_CONFIG_FILE_NAME = exports.DEFAULT_BUILD_DIR_NAME = exports.DOCUSAURUS_VERSION = exports.NODE_MINOR_VERSION = exports.NODE_MAJOR_VERSION = void 0;
exports.escapeRegexp = exports.isUnlisted = exports.isDraft = exports.getFolderContainingFile = exports.findFolderContainingFile = exports.getContentPathList = exports.getDataFileData = exports.getDataFilePath = exports.loadFreshModule = exports.escapeShellArg = exports.getWebpackLoaderCompilerName = exports.getFileLoaderUtils = exports.createAbsoluteFilePathMatcher = exports.createMatcher = exports.GlobExcludeDefault = exports.Globby = exports.docuHash = exports.simpleHash = exports.md5Hash = exports.addTrailingPathSeparator = exports.escapePath = exports.aliasedSitePath = exports.toMessageRelativeFilePath = exports.posixPath = exports.shortName = exports.isNameTooLong = exports.createSlugger = exports.replaceMarkdownLinks = exports.writeMarkdownHeadingId = exports.parseMarkdownFile = exports.parseMarkdownContentTitle = exports.DEFAULT_PARSE_FRONT_MATTER = exports.createExcerpt = void 0;
var constants_1 = require("./constants");
Object.defineProperty(exports, "NODE_MAJOR_VERSION", { enumerable: true, get: function () { return constants_1.NODE_MAJOR_VERSION; } });
Object.defineProperty(exports, "NODE_MINOR_VERSION", { enumerable: true, get: function () { return constants_1.NODE_MINOR_VERSION; } });
Object.defineProperty(exports, "DOCUSAURUS_VERSION", { enumerable: true, get: function () { return constants_1.DOCUSAURUS_VERSION; } });
Object.defineProperty(exports, "DEFAULT_BUILD_DIR_NAME", { enumerable: true, get: function () { return constants_1.DEFAULT_BUILD_DIR_NAME; } });
Object.defineProperty(exports, "DEFAULT_CONFIG_FILE_NAME", { enumerable: true, get: function () { return constants_1.DEFAULT_CONFIG_FILE_NAME; } });
Object.defineProperty(exports, "BABEL_CONFIG_FILE_NAME", { enumerable: true, get: function () { return constants_1.BABEL_CONFIG_FILE_NAME; } });
Object.defineProperty(exports, "GENERATED_FILES_DIR_NAME", { enumerable: true, get: function () { return constants_1.GENERATED_FILES_DIR_NAME; } });
Object.defineProperty(exports, "SRC_DIR_NAME", { enumerable: true, get: function () { return constants_1.SRC_DIR_NAME; } });
Object.defineProperty(exports, "DEFAULT_STATIC_DIR_NAME", { enumerable: true, get: function () { return constants_1.DEFAULT_STATIC_DIR_NAME; } });
Object.defineProperty(exports, "OUTPUT_STATIC_ASSETS_DIR_NAME", { enumerable: true, get: function () { return constants_1.OUTPUT_STATIC_ASSETS_DIR_NAME; } });
Object.defineProperty(exports, "THEME_PATH", { enumerable: true, get: function () { return constants_1.THEME_PATH; } });
Object.defineProperty(exports, "DEFAULT_I18N_DIR_NAME", { enumerable: true, get: function () { return constants_1.DEFAULT_I18N_DIR_NAME; } });
Object.defineProperty(exports, "CODE_TRANSLATIONS_FILE_NAME", { enumerable: true, get: function () { return constants_1.CODE_TRANSLATIONS_FILE_NAME; } });
Object.defineProperty(exports, "DEFAULT_PORT", { enumerable: true, get: function () { return constants_1.DEFAULT_PORT; } });
Object.defineProperty(exports, "DEFAULT_PLUGIN_ID", { enumerable: true, get: function () { return constants_1.DEFAULT_PLUGIN_ID; } });
Object.defineProperty(exports, "WEBPACK_URL_LOADER_LIMIT", { enumerable: true, get: function () { return constants_1.WEBPACK_URL_LOADER_LIMIT; } });
var emitUtils_1 = require("./emitUtils");
Object.defineProperty(exports, "generate", { enumerable: true, get: function () { return emitUtils_1.generate; } });
Object.defineProperty(exports, "readOutputHTMLFile", { enumerable: true, get: function () { return emitUtils_1.readOutputHTMLFile; } });
var gitUtils_1 = require("./gitUtils");
Object.defineProperty(exports, "getFileCommitDate", { enumerable: true, get: function () { return gitUtils_1.getFileCommitDate; } });
Object.defineProperty(exports, "FileNotTrackedError", { enumerable: true, get: function () { return gitUtils_1.FileNotTrackedError; } });
Object.defineProperty(exports, "GitNotFoundError", { enumerable: true, get: function () { return gitUtils_1.GitNotFoundError; } });
var i18nUtils_1 = require("./i18nUtils");
Object.defineProperty(exports, "mergeTranslations", { enumerable: true, get: function () { return i18nUtils_1.mergeTranslations; } });
Object.defineProperty(exports, "updateTranslationFileMessages", { enumerable: true, get: function () { return i18nUtils_1.updateTranslationFileMessages; } });
Object.defineProperty(exports, "getPluginI18nPath", { enumerable: true, get: function () { return i18nUtils_1.getPluginI18nPath; } });
Object.defineProperty(exports, "localizePath", { enumerable: true, get: function () { return i18nUtils_1.localizePath; } });
var jsUtils_1 = require("./jsUtils");
Object.defineProperty(exports, "removeSuffix", { enumerable: true, get: function () { return jsUtils_1.removeSuffix; } });
Object.defineProperty(exports, "removePrefix", { enumerable: true, get: function () { return jsUtils_1.removePrefix; } });
Object.defineProperty(exports, "mapAsyncSequential", { enumerable: true, get: function () { return jsUtils_1.mapAsyncSequential; } });
Object.defineProperty(exports, "findAsyncSequential", { enumerable: true, get: function () { return jsUtils_1.findAsyncSequential; } });
var urlUtils_1 = require("./urlUtils");
Object.defineProperty(exports, "normalizeUrl", { enumerable: true, get: function () { return urlUtils_1.normalizeUrl; } });
Object.defineProperty(exports, "getEditUrl", { enumerable: true, get: function () { return urlUtils_1.getEditUrl; } });
Object.defineProperty(exports, "fileToPath", { enumerable: true, get: function () { return urlUtils_1.fileToPath; } });
Object.defineProperty(exports, "encodePath", { enumerable: true, get: function () { return urlUtils_1.encodePath; } });
Object.defineProperty(exports, "isValidPathname", { enumerable: true, get: function () { return urlUtils_1.isValidPathname; } });
Object.defineProperty(exports, "resolvePathname", { enumerable: true, get: function () { return urlUtils_1.resolvePathname; } });
Object.defineProperty(exports, "parseURLPath", { enumerable: true, get: function () { return urlUtils_1.parseURLPath; } });
Object.defineProperty(exports, "serializeURLPath", { enumerable: true, get: function () { return urlUtils_1.serializeURLPath; } });
Object.defineProperty(exports, "addLeadingSlash", { enumerable: true, get: function () { return urlUtils_1.addLeadingSlash; } });
Object.defineProperty(exports, "addTrailingSlash", { enumerable: true, get: function () { return urlUtils_1.addTrailingSlash; } });
Object.defineProperty(exports, "removeTrailingSlash", { enumerable: true, get: function () { return urlUtils_1.removeTrailingSlash; } });
Object.defineProperty(exports, "hasSSHProtocol", { enumerable: true, get: function () { return urlUtils_1.hasSSHProtocol; } });
Object.defineProperty(exports, "buildHttpsUrl", { enumerable: true, get: function () { return urlUtils_1.buildHttpsUrl; } });
Object.defineProperty(exports, "buildSshUrl", { enumerable: true, get: function () { return urlUtils_1.buildSshUrl; } });
var tags_1 = require("./tags");
Object.defineProperty(exports, "normalizeFrontMatterTags", { enumerable: true, get: function () { return tags_1.normalizeFrontMatterTags; } });
Object.defineProperty(exports, "groupTaggedItems", { enumerable: true, get: function () { return tags_1.groupTaggedItems; } });
Object.defineProperty(exports, "getTagVisibility", { enumerable: true, get: function () { return tags_1.getTagVisibility; } });
var markdownUtils_1 = require("./markdownUtils");
Object.defineProperty(exports, "parseMarkdownHeadingId", { enumerable: true, get: function () { return markdownUtils_1.parseMarkdownHeadingId; } });
Object.defineProperty(exports, "escapeMarkdownHeadingIds", { enumerable: true, get: function () { return markdownUtils_1.escapeMarkdownHeadingIds; } });
Object.defineProperty(exports, "unwrapMdxCodeBlocks", { enumerable: true, get: function () { return markdownUtils_1.unwrapMdxCodeBlocks; } });
Object.defineProperty(exports, "admonitionTitleToDirectiveLabel", { enumerable: true, get: function () { return markdownUtils_1.admonitionTitleToDirectiveLabel; } });
Object.defineProperty(exports, "createExcerpt", { enumerable: true, get: function () { return markdownUtils_1.createExcerpt; } });
Object.defineProperty(exports, "DEFAULT_PARSE_FRONT_MATTER", { enumerable: true, get: function () { return markdownUtils_1.DEFAULT_PARSE_FRONT_MATTER; } });
Object.defineProperty(exports, "parseMarkdownContentTitle", { enumerable: true, get: function () { return markdownUtils_1.parseMarkdownContentTitle; } });
Object.defineProperty(exports, "parseMarkdownFile", { enumerable: true, get: function () { return markdownUtils_1.parseMarkdownFile; } });
Object.defineProperty(exports, "writeMarkdownHeadingId", { enumerable: true, get: function () { return markdownUtils_1.writeMarkdownHeadingId; } });
var markdownLinks_1 = require("./markdownLinks");
Object.defineProperty(exports, "replaceMarkdownLinks", { enumerable: true, get: function () { return markdownLinks_1.replaceMarkdownLinks; } });
var slugger_1 = require("./slugger");
Object.defineProperty(exports, "createSlugger", { enumerable: true, get: function () { return slugger_1.createSlugger; } });
var pathUtils_1 = require("./pathUtils");
Object.defineProperty(exports, "isNameTooLong", { enumerable: true, get: function () { return pathUtils_1.isNameTooLong; } });
Object.defineProperty(exports, "shortName", { enumerable: true, get: function () { return pathUtils_1.shortName; } });
Object.defineProperty(exports, "posixPath", { enumerable: true, get: function () { return pathUtils_1.posixPath; } });
Object.defineProperty(exports, "toMessageRelativeFilePath", { enumerable: true, get: function () { return pathUtils_1.toMessageRelativeFilePath; } });
Object.defineProperty(exports, "aliasedSitePath", { enumerable: true, get: function () { return pathUtils_1.aliasedSitePath; } });
Object.defineProperty(exports, "escapePath", { enumerable: true, get: function () { return pathUtils_1.escapePath; } });
Object.defineProperty(exports, "addTrailingPathSeparator", { enumerable: true, get: function () { return pathUtils_1.addTrailingPathSeparator; } });
var hashUtils_1 = require("./hashUtils");
Object.defineProperty(exports, "md5Hash", { enumerable: true, get: function () { return hashUtils_1.md5Hash; } });
Object.defineProperty(exports, "simpleHash", { enumerable: true, get: function () { return hashUtils_1.simpleHash; } });
Object.defineProperty(exports, "docuHash", { enumerable: true, get: function () { return hashUtils_1.docuHash; } });
var globUtils_1 = require("./globUtils");
Object.defineProperty(exports, "Globby", { enumerable: true, get: function () { return globUtils_1.Globby; } });
Object.defineProperty(exports, "GlobExcludeDefault", { enumerable: true, get: function () { return globUtils_1.GlobExcludeDefault; } });
Object.defineProperty(exports, "createMatcher", { enumerable: true, get: function () { return globUtils_1.createMatcher; } });
Object.defineProperty(exports, "createAbsoluteFilePathMatcher", { enumerable: true, get: function () { return globUtils_1.createAbsoluteFilePathMatcher; } });
var webpackUtils_1 = require("./webpackUtils");
Object.defineProperty(exports, "getFileLoaderUtils", { enumerable: true, get: function () { return webpackUtils_1.getFileLoaderUtils; } });
Object.defineProperty(exports, "getWebpackLoaderCompilerName", { enumerable: true, get: function () { return webpackUtils_1.getWebpackLoaderCompilerName; } });
var shellUtils_1 = require("./shellUtils");
Object.defineProperty(exports, "escapeShellArg", { enumerable: true, get: function () { return shellUtils_1.escapeShellArg; } });
var moduleUtils_1 = require("./moduleUtils");
Object.defineProperty(exports, "loadFreshModule", { enumerable: true, get: function () { return moduleUtils_1.loadFreshModule; } });
var dataFileUtils_1 = require("./dataFileUtils");
Object.defineProperty(exports, "getDataFilePath", { enumerable: true, get: function () { return dataFileUtils_1.getDataFilePath; } });
Object.defineProperty(exports, "getDataFileData", { enumerable: true, get: function () { return dataFileUtils_1.getDataFileData; } });
Object.defineProperty(exports, "getContentPathList", { enumerable: true, get: function () { return dataFileUtils_1.getContentPathList; } });
Object.defineProperty(exports, "findFolderContainingFile", { enumerable: true, get: function () { return dataFileUtils_1.findFolderContainingFile; } });
Object.defineProperty(exports, "getFolderContainingFile", { enumerable: true, get: function () { return dataFileUtils_1.getFolderContainingFile; } });
var contentVisibilityUtils_1 = require("./contentVisibilityUtils");
Object.defineProperty(exports, "isDraft", { enumerable: true, get: function () { return contentVisibilityUtils_1.isDraft; } });
Object.defineProperty(exports, "isUnlisted", { enumerable: true, get: function () { return contentVisibilityUtils_1.isUnlisted; } });
var regExpUtils_1 = require("./regExpUtils");
Object.defineProperty(exports, "escapeRegexp", { enumerable: true, get: function () { return regExpUtils_1.escapeRegexp; } });
//# sourceMappingURL=index.js.map
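Everything above is a re-export, so consumers import from the package root rather than from individual lib files. A short sketch using two helpers whose implementations appear elsewhere in this diff:

const {normalizeUrl, removeSuffix} = require('@docusaurus/utils');

// Join URL segments without duplicating slashes (per its usage in i18nUtils above).
const url = normalizeUrl(['https://example.com/', '/docs/', 'intro']);

// "intro" -- removeSuffix is a no-op when the suffix is absent.
const docId = removeSuffix('intro.md', '.md');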

1
node_modules/@docusaurus/utils/lib/index.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,yCAiBqB;AAhBnB,+GAAA,kBAAkB,OAAA;AAClB,+GAAA,kBAAkB,OAAA;AAClB,+GAAA,kBAAkB,OAAA;AAClB,mHAAA,sBAAsB,OAAA;AACtB,qHAAA,wBAAwB,OAAA;AACxB,mHAAA,sBAAsB,OAAA;AACtB,qHAAA,wBAAwB,OAAA;AACxB,yGAAA,YAAY,OAAA;AACZ,oHAAA,uBAAuB,OAAA;AACvB,0HAAA,6BAA6B,OAAA;AAC7B,uGAAA,UAAU,OAAA;AACV,kHAAA,qBAAqB,OAAA;AACrB,wHAAA,2BAA2B,OAAA;AAC3B,yGAAA,YAAY,OAAA;AACZ,8GAAA,iBAAiB,OAAA;AACjB,qHAAA,wBAAwB,OAAA;AAE1B,yCAAyD;AAAjD,qGAAA,QAAQ,OAAA;AAAE,+GAAA,kBAAkB,OAAA;AACpC,uCAIoB;AAHlB,6GAAA,iBAAiB,OAAA;AACjB,+GAAA,mBAAmB,OAAA;AACnB,4GAAA,gBAAgB,OAAA;AAElB,yCAKqB;AAJnB,8GAAA,iBAAiB,OAAA;AACjB,0HAAA,6BAA6B,OAAA;AAC7B,8GAAA,iBAAiB,OAAA;AACjB,yGAAA,YAAY,OAAA;AAEd,qCAKmB;AAJjB,uGAAA,YAAY,OAAA;AACZ,uGAAA,YAAY,OAAA;AACZ,6GAAA,kBAAkB,OAAA;AAClB,8GAAA,mBAAmB,OAAA;AAErB,uCAeoB;AAdlB,wGAAA,YAAY,OAAA;AACZ,sGAAA,UAAU,OAAA;AACV,sGAAA,UAAU,OAAA;AACV,sGAAA,UAAU,OAAA;AACV,2GAAA,eAAe,OAAA;AACf,2GAAA,eAAe,OAAA;AACf,wGAAA,YAAY,OAAA;AACZ,4GAAA,gBAAgB,OAAA;AAChB,2GAAA,eAAe,OAAA;AACf,4GAAA,gBAAgB,OAAA;AAChB,+GAAA,mBAAmB,OAAA;AACnB,0GAAA,cAAc,OAAA;AACd,yGAAA,aAAa,OAAA;AACb,uGAAA,WAAW,OAAA;AAGb,+BAQgB;AAHd,gHAAA,wBAAwB,OAAA;AACxB,wGAAA,gBAAgB,OAAA;AAChB,wGAAA,gBAAgB,OAAA;AAElB,iDAWyB;AAVvB,uHAAA,sBAAsB,OAAA;AACtB,yHAAA,wBAAwB,OAAA;AACxB,oHAAA,mBAAmB,OAAA;AACnB,gIAAA,+BAA+B,OAAA;AAC/B,8GAAA,aAAa,OAAA;AACb,2HAAA,0BAA0B,OAAA;AAC1B,0HAAA,yBAAyB,OAAA;AACzB,kHAAA,iBAAiB,OAAA;AACjB,uHAAA,sBAAsB,OAAA;AAGxB,iDAIyB;AADvB,qHAAA,oBAAoB,OAAA;AAEtB,qCAA2E;AAAhC,wGAAA,aAAa,OAAA;AACxD,yCAQqB;AAPnB,0GAAA,aAAa,OAAA;AACb,sGAAA,SAAS,OAAA;AACT,sGAAA,SAAS,OAAA;AACT,sHAAA,yBAAyB,OAAA;AACzB,4GAAA,eAAe,OAAA;AACf,uGAAA,UAAU,OAAA;AACV,qHAAA,wBAAwB,OAAA;AAE1B,yCAA0D;AAAlD,oGAAA,OAAO,OAAA;AAAE,uGAAA,UAAU,OAAA;AAAE,qGAAA,QAAQ,OAAA;AACrC,yCAKqB;AAJnB,mGAAA,MAAM,OAAA;AACN,+GAAA,kBAAkB,OAAA;AAClB,0GAAA,aAAa,OAAA;AACb,0HAAA,6BAA6B,OAAA;AAE/B,+CAIwB;AAHtB,kHAAA,kBAAkB,OAAA;AAClB,4HAAA,4BAA4B,OAAA;AAG9B,2CAA4C;AAApC,4GAAA,cAAc,OAAA;AACtB,6CAA8C;AAAtC,8GAAA,eAAe,OAAA;AACvB,iDAMyB;AALvB,gHAAA,eAAe,OAAA;AACf,gHAAA,eAAe,OAAA;AACf,mHAAA,kBAAkB,OAAA;AAClB,yHAAA,wBAAwB,OAAA;AACxB,wHAAA,uBAAuB,OAAA;AAEzB,mEAA6D;AAArD,iHAAA,OAAO,OAAA;AAAE,oHAAA,UAAU,OAAA;AAC3B,6CAA2C;AAAnC,2GAAA,YAAY,OAAA"}

32
node_modules/@docusaurus/utils/lib/jsUtils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,32 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/** Adds a given string prefix to `str`. */
export declare function addPrefix(str: string, prefix: string): string;
/** Adds a given string suffix to `str`. */
export declare function addSuffix(str: string, suffix: string): string;
/** Removes a given string suffix from `str`. */
export declare function removeSuffix(str: string, suffix: string): string;
/** Removes a given string prefix from `str`. */
export declare function removePrefix(str: string, prefix: string): string;
/**
* `Array#map` for async operations where order matters.
* @param array The array to traverse.
* @param action An async action to be performed on every array item. Will be
* awaited before working on the next.
* @returns The list of results returned from every `action(item)`
*/
export declare function mapAsyncSequential<T, R>(array: T[], action: (t: T) => Promise<R>): Promise<R[]>;
/**
* `Array#find` for async operations where order matters.
* @param array The array to traverse.
* @param predicate An async predicate to be called on every array item. Should
 * return a boolean indicating whether the current element should be returned.
* @returns The function immediately returns the first item on which `predicate`
* returns `true`, or `undefined` if none matches the predicate.
*/
export declare function findAsyncSequential<T>(array: T[], predicate: (t: T) => Promise<boolean>): Promise<T | undefined>;
//# sourceMappingURL=jsUtils.d.ts.map
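The compiled implementations follow in jsUtils.js below. Note that `addPrefix`/`addSuffix` are declared and exported by this module but are not re-exported from the package root in this version, so the sketch below (paths hypothetical) reaches into the lib file directly:

const {addPrefix, addSuffix, removePrefix} = require('@docusaurus/utils/lib/jsUtils');

addPrefix('docs/intro', '/');     // "/docs/intro"
addSuffix('/docs/intro', '/');    // "/docs/intro/"
removePrefix('/docs/intro', '/'); // "docs/intro"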

1
node_modules/@docusaurus/utils/lib/jsUtils.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"jsUtils.d.ts","sourceRoot":"","sources":["../src/jsUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,2CAA2C;AAC3C,wBAAgB,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,MAAM,CAE7D;AAED,2CAA2C;AAC3C,wBAAgB,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,MAAM,CAE7D;AAED,gDAAgD;AAChD,wBAAgB,YAAY,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,MAAM,CAMhE;AAED,gDAAgD;AAChD,wBAAgB,YAAY,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,MAAM,CAEhE;AAED;;;;;;GAMG;AACH,wBAAsB,kBAAkB,CAAC,CAAC,EAAE,CAAC,EAC3C,KAAK,EAAE,CAAC,EAAE,EACV,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,OAAO,CAAC,CAAC,CAAC,GAC3B,OAAO,CAAC,CAAC,EAAE,CAAC,CAOd;AAED;;;;;;;GAOG;AACH,wBAAsB,mBAAmB,CAAC,CAAC,EACzC,KAAK,EAAE,CAAC,EAAE,EACV,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,OAAO,CAAC,OAAO,CAAC,GACpC,OAAO,CAAC,CAAC,GAAG,SAAS,CAAC,CAOxB"}

67
node_modules/@docusaurus/utils/lib/jsUtils.js generated vendored Normal file
View File

@@ -0,0 +1,67 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.findAsyncSequential = exports.mapAsyncSequential = exports.removePrefix = exports.removeSuffix = exports.addSuffix = exports.addPrefix = void 0;
/** Adds a given string prefix to `str`. */
function addPrefix(str, prefix) {
return str.startsWith(prefix) ? str : `${prefix}${str}`;
}
exports.addPrefix = addPrefix;
/** Adds a given string suffix to `str`. */
function addSuffix(str, suffix) {
return str.endsWith(suffix) ? str : `${str}${suffix}`;
}
exports.addSuffix = addSuffix;
/** Removes a given string suffix from `str`. */
function removeSuffix(str, suffix) {
if (suffix === '') {
// str.slice(0, 0) is ""
return str;
}
return str.endsWith(suffix) ? str.slice(0, -suffix.length) : str;
}
exports.removeSuffix = removeSuffix;
/** Removes a given string prefix from `str`. */
function removePrefix(str, prefix) {
return str.startsWith(prefix) ? str.slice(prefix.length) : str;
}
exports.removePrefix = removePrefix;
/**
* `Array#map` for async operations where order matters.
* @param array The array to traverse.
* @param action An async action to be performed on every array item. Will be
* awaited before working on the next.
* @returns The list of results returned from every `action(item)`
*/
async function mapAsyncSequential(array, action) {
const results = [];
for (const t of array) {
const result = await action(t);
results.push(result);
}
return results;
}
exports.mapAsyncSequential = mapAsyncSequential;
/**
* `Array#find` for async operations where order matters.
* @param array The array to traverse.
* @param predicate An async predicate to be called on every array item. Should
 * return a boolean indicating whether the current element should be returned.
* @returns The function immediately returns the first item on which `predicate`
* returns `true`, or `undefined` if none matches the predicate.
*/
async function findAsyncSequential(array, predicate) {
for (const t of array) {
if (await predicate(t)) {
return t;
}
}
return undefined;
}
exports.findAsyncSequential = findAsyncSequential;
//# sourceMappingURL=jsUtils.js.map

1
node_modules/@docusaurus/utils/lib/jsUtils.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"jsUtils.js","sourceRoot":"","sources":["../src/jsUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;AAEH,2CAA2C;AAC3C,SAAgB,SAAS,CAAC,GAAW,EAAE,MAAc;IACnD,OAAO,GAAG,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,MAAM,GAAG,GAAG,EAAE,CAAC;AAC1D,CAAC;AAFD,8BAEC;AAED,2CAA2C;AAC3C,SAAgB,SAAS,CAAC,GAAW,EAAE,MAAc;IACnD,OAAO,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,MAAM,EAAE,CAAC;AACxD,CAAC;AAFD,8BAEC;AAED,gDAAgD;AAChD,SAAgB,YAAY,CAAC,GAAW,EAAE,MAAc;IACtD,IAAI,MAAM,KAAK,EAAE,EAAE;QACjB,wBAAwB;QACxB,OAAO,GAAG,CAAC;KACZ;IACD,OAAO,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACnE,CAAC;AAND,oCAMC;AAED,gDAAgD;AAChD,SAAgB,YAAY,CAAC,GAAW,EAAE,MAAc;IACtD,OAAO,GAAG,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACjE,CAAC;AAFD,oCAEC;AAED;;;;;;GAMG;AACI,KAAK,UAAU,kBAAkB,CACtC,KAAU,EACV,MAA4B;IAE5B,MAAM,OAAO,GAAQ,EAAE,CAAC;IACxB,KAAK,MAAM,CAAC,IAAI,KAAK,EAAE;QACrB,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,CAAC,CAAC,CAAC;QAC/B,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;KACtB;IACD,OAAO,OAAO,CAAC;AACjB,CAAC;AAVD,gDAUC;AAED;;;;;;;GAOG;AACI,KAAK,UAAU,mBAAmB,CACvC,KAAU,EACV,SAAqC;IAErC,KAAK,MAAM,CAAC,IAAI,KAAK,EAAE;QACrB,IAAI,MAAM,SAAS,CAAC,CAAC,CAAC,EAAE;YACtB,OAAO,CAAC,CAAC;SACV;KACF;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAVD,kDAUC"}

71
node_modules/@docusaurus/utils/lib/markdownLinks.d.ts generated vendored Normal file
View File

@@ -0,0 +1,71 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/**
* Content plugins have a base path and a localized path to source content from.
* We will look into the localized path in priority.
*/
export type ContentPaths = {
/**
* The absolute path to the base content directory, like `"<siteDir>/docs"`.
*/
contentPath: string;
/**
* The absolute path to the localized content directory, like
* `"<siteDir>/i18n/zh-Hans/plugin-content-docs"`.
*/
contentPathLocalized: string;
};
/** Data structure representing each broken Markdown link to be reported. */
export type BrokenMarkdownLink<T extends ContentPaths> = {
/** Absolute path to the file containing this link. */
filePath: string;
/**
* This is generic because it may contain extra metadata like version name,
* which the reporter can provide for context.
*/
contentPaths: T;
/**
* The content of the link, like `"./brokenFile.md"`
*/
link: string;
};
/**
* Takes a Markdown file and replaces relative file references with their URL
* counterparts, e.g. `[link](./intro.md)` => `[link](/docs/intro)`, preserving
* everything else.
*
* This method uses best effort to find a matching file. The file reference can
* be relative to the directory of the current file (most likely) or any of the
* content paths (so `/tutorials/intro.md` can be resolved as
* `<siteDir>/docs/tutorials/intro.md`). Links that contain the `http(s):` or
* `@site/` prefix will always be ignored.
*/
export declare function replaceMarkdownLinks<T extends ContentPaths>({ siteDir, fileString, filePath, contentPaths, sourceToPermalink, }: {
/** Absolute path to the site directory, used to resolve aliased paths. */
siteDir: string;
/** The Markdown file content to be processed. */
fileString: string;
/** Absolute path to the current file containing `fileString`. */
filePath: string;
/** The content paths which the file reference may live in. */
contentPaths: T;
/**
* A map from source paths to their URLs. Source paths are `@site` aliased.
*/
sourceToPermalink: {
[aliasedPath: string]: string;
};
}): {
/**
* The content with all Markdown file references replaced with their URLs.
* Unresolved links are left as-is.
*/
newContent: string;
/** The list of broken links. */
brokenMarkdownLinks: BrokenMarkdownLink<T>[];
};
//# sourceMappingURL=markdownLinks.d.ts.map

1
node_modules/@docusaurus/utils/lib/markdownLinks.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"markdownLinks.d.ts","sourceRoot":"","sources":["../src/markdownLinks.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAMH;;;GAGG;AACH,MAAM,MAAM,YAAY,GAAG;IACzB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;;OAGG;IACH,oBAAoB,EAAE,MAAM,CAAC;CAC9B,CAAC;AAEF,4EAA4E;AAC5E,MAAM,MAAM,kBAAkB,CAAC,CAAC,SAAS,YAAY,IAAI;IACvD,sDAAsD;IACtD,QAAQ,EAAE,MAAM,CAAC;IACjB;;;OAGG;IACH,YAAY,EAAE,CAAC,CAAC;IAChB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACd,CAAC;AAoBF;;;;;;;;;;GAUG;AACH,wBAAgB,oBAAoB,CAAC,CAAC,SAAS,YAAY,EAAE,EAC3D,OAAO,EACP,UAAU,EACV,QAAQ,EACR,YAAY,EACZ,iBAAiB,GAClB,EAAE;IACD,0EAA0E;IAC1E,OAAO,EAAE,MAAM,CAAC;IAChB,iDAAiD;IACjD,UAAU,EAAE,MAAM,CAAC;IACnB,iEAAiE;IACjE,QAAQ,EAAE,MAAM,CAAC;IACjB,8DAA8D;IAC9D,YAAY,EAAE,CAAC,CAAC;IAChB;;OAEG;IACH,iBAAiB,EAAE;QAAC,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,CAAA;KAAC,CAAC;CACpD,GAAG;IACF;;;OAGG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB,iCAAiC;IACjC,mBAAmB,EAAE,kBAAkB,CAAC,CAAC,CAAC,EAAE,CAAC;CAC9C,CAmGA"}

119
node_modules/@docusaurus/utils/lib/markdownLinks.js generated vendored Normal file
View File

@@ -0,0 +1,119 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.replaceMarkdownLinks = void 0;
const tslib_1 = require("tslib");
const path_1 = tslib_1.__importDefault(require("path"));
const dataFileUtils_1 = require("./dataFileUtils");
const pathUtils_1 = require("./pathUtils");
function parseCodeFence(line) {
const match = line.trim().match(/^(?<fence>`{3,}|~{3,})(?<rest>.*)/);
if (!match) {
return null;
}
return {
type: match.groups.fence[0],
definitelyOpen: !!match.groups.rest,
count: match.groups.fence.length,
};
}
/**
* Takes a Markdown file and replaces relative file references with their URL
* counterparts, e.g. `[link](./intro.md)` => `[link](/docs/intro)`, preserving
* everything else.
*
* This method uses best effort to find a matching file. The file reference can
* be relative to the directory of the current file (most likely) or any of the
* content paths (so `/tutorials/intro.md` can be resolved as
* `<siteDir>/docs/tutorials/intro.md`). Links that contain the `http(s):` or
* `@site/` prefix will always be ignored.
*/
function replaceMarkdownLinks({ siteDir, fileString, filePath, contentPaths, sourceToPermalink, }) {
const brokenMarkdownLinks = [];
// Replace internal markdown linking (except in fenced blocks).
let lastOpenCodeFence = null;
const lines = fileString.split('\n').map((line) => {
const codeFence = parseCodeFence(line);
if (codeFence) {
if (!lastOpenCodeFence) {
lastOpenCodeFence = codeFence;
}
else if (!codeFence.definitelyOpen &&
lastOpenCodeFence.type === codeFence.type &&
lastOpenCodeFence.count <= codeFence.count) {
// All three conditions must be met in order for this to be considered
// a closing fence.
lastOpenCodeFence = null;
}
}
if (lastOpenCodeFence) {
return line;
}
let modifiedLine = line;
// Replace inline-style links or reference-style links e.g:
// This is [Document 1](doc1.md)
// [doc1]: doc1.md
const linkTitlePattern = '(?:\\s+(?:\'.*?\'|".*?"|\\(.*?\\)))?';
const linkSuffixPattern = '(?:\\?[^#>\\s]+)?(?:#[^>\\s]+)?';
const linkCapture = (forbidden) => `((?!https?://|@site/)[^${forbidden}#?]+)`;
const linkURLPattern = `(?:(?!<)${linkCapture('()\\s')}${linkSuffixPattern}|<${linkCapture('>')}${linkSuffixPattern}>)`;
const linkPattern = new RegExp(`\\[(?:(?!\\]\\().)*\\]\\(\\s*${linkURLPattern}${linkTitlePattern}\\s*\\)|^\\s*\\[[^[\\]]*[^[\\]\\s][^[\\]]*\\]:\\s*${linkURLPattern}${linkTitlePattern}$`, 'dgm');
let mdMatch = linkPattern.exec(modifiedLine);
while (mdMatch !== null) {
// Replace it to correct html link.
const mdLink = mdMatch.slice(1, 5).find(Boolean);
const mdLinkRange = mdMatch.indices.slice(1, 5).find(Boolean);
if (!/\.mdx?$/.test(mdLink)) {
mdMatch = linkPattern.exec(modifiedLine);
continue;
}
const sourcesToTry = [];
// ./file.md and ../file.md are always relative to the current file
if (!mdLink.startsWith('./') && !mdLink.startsWith('../')) {
sourcesToTry.push(...(0, dataFileUtils_1.getContentPathList)(contentPaths), siteDir);
}
// /file.md is always relative to the content path
if (!mdLink.startsWith('/')) {
sourcesToTry.push(path_1.default.dirname(filePath));
}
const aliasedSourceMatch = sourcesToTry
.map((p) => path_1.default.join(p, decodeURIComponent(mdLink)))
.map((source) => (0, pathUtils_1.aliasedSitePath)(source, siteDir))
.find((source) => sourceToPermalink[source]);
const permalink = aliasedSourceMatch
? sourceToPermalink[aliasedSourceMatch]
: undefined;
if (permalink) {
// MDX won't be happy if the permalink contains a space, we need to
// convert it to %20
const encodedPermalink = permalink
.split('/')
.map((part) => part.replace(/\s/g, '%20'))
.join('/');
modifiedLine = `${modifiedLine.slice(0, mdLinkRange[0])}${encodedPermalink}${modifiedLine.slice(mdLinkRange[1])}`;
// Adjust the lastIndex to avoid passing over the next link if the
// newly replaced URL is shorter.
linkPattern.lastIndex += encodedPermalink.length - mdLink.length;
}
else {
const brokenMarkdownLink = {
contentPaths,
filePath,
link: mdLink,
};
brokenMarkdownLinks.push(brokenMarkdownLink);
}
mdMatch = linkPattern.exec(modifiedLine);
}
return modifiedLine;
});
const newContent = lines.join('\n');
return { newContent, brokenMarkdownLinks };
}
exports.replaceMarkdownLinks = replaceMarkdownLinks;
//# sourceMappingURL=markdownLinks.js.map
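A hedged end-to-end sketch of replaceMarkdownLinks; every path below is hypothetical, and the sourceToPermalink keys are `@site`-aliased source paths as described in the docstring above.

const {replaceMarkdownLinks} = require('@docusaurus/utils');

const {newContent, brokenMarkdownLinks} = replaceMarkdownLinks({
  siteDir: '/my-site',
  filePath: '/my-site/docs/guide.md',
  fileString: 'See the [intro](./intro.md) first.',
  contentPaths: {
    contentPath: '/my-site/docs',
    contentPathLocalized: '/my-site/i18n/fr/docusaurus-plugin-content-docs/current',
  },
  sourceToPermalink: {'@site/docs/intro.md': '/docs/intro'},
});
// newContent          -> "See the [intro](/docs/intro) first."
// brokenMarkdownLinks -> [] (links with no matching permalink are reported here instead)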

1
node_modules/@docusaurus/utils/lib/markdownLinks.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"markdownLinks.js","sourceRoot":"","sources":["../src/markdownLinks.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,wDAAwB;AACxB,mDAAmD;AACnD,2CAA4C;AAuC5C,SAAS,cAAc,CAAC,IAAY;IAClC,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;IACrE,IAAI,CAAC,KAAK,EAAE;QACV,OAAO,IAAI,CAAC;KACb;IACD,OAAO;QACL,IAAI,EAAE,KAAK,CAAC,MAAO,CAAC,KAAM,CAAC,CAAC,CAAe;QAC3C,cAAc,EAAE,CAAC,CAAC,KAAK,CAAC,MAAO,CAAC,IAAK;QACrC,KAAK,EAAE,KAAK,CAAC,MAAO,CAAC,KAAM,CAAC,MAAM;KACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;GAUG;AACH,SAAgB,oBAAoB,CAAyB,EAC3D,OAAO,EACP,UAAU,EACV,QAAQ,EACR,YAAY,EACZ,iBAAiB,GAclB;IASC,MAAM,mBAAmB,GAA4B,EAAE,CAAC;IAExD,+DAA+D;IAC/D,IAAI,iBAAiB,GAAqB,IAAI,CAAC;IAC/C,MAAM,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;QAChD,MAAM,SAAS,GAAG,cAAc,CAAC,IAAI,CAAC,CAAC;QACvC,IAAI,SAAS,EAAE;YACb,IAAI,CAAC,iBAAiB,EAAE;gBACtB,iBAAiB,GAAG,SAAS,CAAC;aAC/B;iBAAM,IACL,CAAC,SAAS,CAAC,cAAc;gBACzB,iBAAiB,CAAC,IAAI,KAAK,SAAS,CAAC,IAAI;gBACzC,iBAAiB,CAAC,KAAK,IAAI,SAAS,CAAC,KAAK,EAC1C;gBACA,sEAAsE;gBACtE,mBAAmB;gBACnB,iBAAiB,GAAG,IAAI,CAAC;aAC1B;SACF;QACD,IAAI,iBAAiB,EAAE;YACrB,OAAO,IAAI,CAAC;SACb;QAED,IAAI,YAAY,GAAG,IAAI,CAAC;QACxB,2DAA2D;QAC3D,gCAAgC;QAChC,kBAAkB;QAClB,MAAM,gBAAgB,GAAG,sCAAsC,CAAC;QAChE,MAAM,iBAAiB,GAAG,iCAAiC,CAAC;QAC5D,MAAM,WAAW,GAAG,CAAC,SAAiB,EAAE,EAAE,CACxC,0BAA0B,SAAS,OAAO,CAAC;QAC7C,MAAM,cAAc,GAAG,WAAW,WAAW,CAC3C,OAAO,CACR,GAAG,iBAAiB,KAAK,WAAW,CAAC,GAAG,CAAC,GAAG,iBAAiB,IAAI,CAAC;QACnE,MAAM,WAAW,GAAG,IAAI,MAAM,CAC5B,gCAAgC,cAAc,GAAG,gBAAgB,qDAAqD,cAAc,GAAG,gBAAgB,GAAG,EAC1J,KAAK,CACN,CAAC;QACF,IAAI,OAAO,GAAG,WAAW,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAC7C,OAAO,OAAO,KAAK,IAAI,EAAE;YACvB,mCAAmC;YACnC,MAAM,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAE,CAAC;YAClD,MAAM,WAAW,GAAG,OAAO,CAAC,OAAQ,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAE,CAAC;YAChE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;gBAC3B,OAAO,GAAG,WAAW,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBACzC,SAAS;aACV;YAED,MAAM,YAAY,GAAa,EAAE,CAAC;YAClC,mEAAmE;YACnE,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;gBACzD,YAAY,CAAC,IAAI,CAAC,GAAG,IAAA,kCAAkB,EAAC,YAAY,CAAC,EAAE,OAAO,CAAC,CAAC;aACjE;YACD,kDAAkD;YAClD,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBAC3B,YAAY,CAAC,IAAI,CAAC,cAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC;aAC3C;YAED,MAAM,kBAAkB,GAAG,YAAY;iBACpC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,cAAI,CAAC,IAAI,CAAC,CAAC,EAAE,kBAAkB,CAAC,MAAM,CAAC,CAAC,CAAC;iBACpD,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,IAAA,2BAAe,EAAC,MAAM,EAAE,OAAO,CAAC,CAAC;iBACjD,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,iBAAiB,CAAC,MAAM,CAAC,CAAC,CAAC;YAE/C,MAAM,SAAS,GAAuB,kBAAkB;gBACtD,CAAC,CAAC,iBAAiB,CAAC,kBAAkB,CAAC;gBACvC,CAAC,CAAC,SAAS,CAAC;YAEd,IAAI,SAAS,EAAE;gBACb,mEAAmE;gBACnE,oBAAoB;gBACpB,MAAM,gBAAgB,GAAG,SAAS;qBAC/B,KAAK,CAAC,GAAG,CAAC;qBACV,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;qBACzC,IAAI,CAAC,GAAG,CAAC,CAAC;gBACb,YAAY,GAAG,GAAG,YAAY,CAAC,KAAK,CAClC,CAAC,EACD,WAAW,CAAC,CAAC,CAAC,CACf,GAAG,gBAAgB,GAAG,YAAY,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;gBAC5D,kEAAkE;gBAClE,iCAAiC;gBACjC,WAAW,CAAC,SAAS,IAAI,gBAAgB,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;aAClE;iBAAM;gBACL,MAAM,kBAAkB,GAA0B;oBAChD,YAAY;oBACZ,QAAQ;oBACR,IAAI,EAAE,MAAM;iBACb,CAAC;gBAEF,mBAAmB,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;aAC9C;YACD,OAAO,GAAG,WAAW,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;SAC1C;QACD,OAAO,YAAY,CAAC;IACtB,CAAC,CAAC,CAAC;IAEH,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEpC,OAAO,EAAC,UAAU,EAAE,mBAAmB,EAAC,CAAC;AAC3C,CAAC;AA9HD,oDA8HC"}

143
node_modules/@docusaurus/utils/lib/markdownUtils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,143 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import { type SluggerOptions } from './slugger';
import type { ParseFrontMatter, DefaultParseFrontMatter } from '@docusaurus/types';
/**
* Parses custom ID from a heading. The ID can contain any characters except
* `{#` and `}`.
*
* @param heading e.g. `## Some heading {#some-heading}` where the last
* character must be `}` for the ID to be recognized
*/
export declare function parseMarkdownHeadingId(heading: string): {
/**
* The heading content sans the ID part, right-trimmed. e.g. `## Some heading`
*/
text: string;
/** The heading ID. e.g. `some-heading` */
id: string | undefined;
};
/**
 * MDX 2 requires escaping { with a \ so our anchor syntax needs that now.
* See https://mdxjs.com/docs/troubleshooting-mdx/#could-not-parse-expression-with-acorn-error
*/
export declare function escapeMarkdownHeadingIds(content: string): string;
/**
* Hacky temporary escape hatch for Crowdin bad MDX support
* See https://docusaurus.io/docs/i18n/crowdin#mdx
*
* TODO Titus suggested a clean solution based on ```mdx eval and Remark
* See https://github.com/mdx-js/mdx/issues/701#issuecomment-947030041
*
* @param content
*/
export declare function unwrapMdxCodeBlocks(content: string): string;
/**
* Add support for our legacy ":::note Title" admonition syntax
* Not supported by https://github.com/remarkjs/remark-directive
* Syntax is transformed to ":::note[Title]" (container directive label)
* See https://talk.commonmark.org/t/generic-directives-plugins-syntax/444
*
* @param content
* @param admonitionContainerDirectives
*/
export declare function admonitionTitleToDirectiveLabel(content: string, admonitionContainerDirectives: string[]): string;
/**
* Creates an excerpt of a Markdown file. This function will:
*
* - Ignore h1 headings (setext or atx)
* - Ignore import/export
* - Ignore code blocks
*
* And for the first contentful line, it will strip away most Markdown
* syntax, including HTML tags, emphasis, links (keeping the text), etc.
*/
export declare function createExcerpt(fileString: string): string | undefined;
/**
* Takes a raw Markdown file content, and parses the front matter using
* gray-matter. Worth noting that gray-matter accepts TOML and other markup
* languages as well.
*
* @throws Throws when gray-matter throws. e.g.:
* ```md
* ---
* foo: : bar
* ---
* ```
*/
export declare function parseFileContentFrontMatter(fileContent: string): {
/** Front matter as parsed by gray-matter. */
frontMatter: {
[key: string]: unknown;
};
/** The remaining content, trimmed. */
content: string;
};
export declare const DEFAULT_PARSE_FRONT_MATTER: DefaultParseFrontMatter;
type ParseMarkdownContentTitleOptions = {
/**
* If `true`, the matching title will be removed from the returned content.
* We can promise that at least one empty line will be left between the
 * content before and after, but you shouldn't make too many assumptions
* about what's left.
*/
removeContentTitle?: boolean;
};
/**
* Takes the raw Markdown content, without front matter, and tries to find an h1
* title (setext or atx) to be used as metadata.
*
* It only searches until the first contentful paragraph, ignoring import/export
* declarations.
*
 * It will try to convert markdown to reasonable text, but it isn't best-effort,
* since it's only used as a fallback when `frontMatter.title` is not provided.
* For now, we just unwrap inline code (``# `config.js` `` => `config.js`).
*/
export declare function parseMarkdownContentTitle(contentUntrimmed: string, options?: ParseMarkdownContentTitleOptions): {
/** The content, optionally without the content title. */
content: string;
/** The title, trimmed and without the `#`. */
contentTitle: string | undefined;
};
/**
* Makes a full-round parse.
*
* @throws Throws when `parseFrontMatter` throws, usually because of invalid
* syntax.
*/
export declare function parseMarkdownFile({ filePath, fileContent, parseFrontMatter, removeContentTitle, }: {
filePath: string;
fileContent: string;
parseFrontMatter: ParseFrontMatter;
} & ParseMarkdownContentTitleOptions): Promise<{
/** @see {@link parseFrontMatter} */
frontMatter: {
[key: string]: unknown;
};
/** @see {@link parseMarkdownContentTitle} */
contentTitle: string | undefined;
/** @see {@link createExcerpt} */
excerpt: string | undefined;
/**
* Content without front matter and (optionally) without title, depending on
* the `removeContentTitle` option.
*/
content: string;
}>;
export type WriteHeadingIDOptions = SluggerOptions & {
/** Overwrite existing heading IDs. */
overwrite?: boolean;
};
/**
* Takes Markdown content, returns new content with heading IDs written.
* Respects existing IDs (unless `overwrite=true`) and never generates colliding
* IDs (through the slugger).
*/
export declare function writeMarkdownHeadingId(content: string, options?: WriteHeadingIDOptions): string;
export {};
//# sourceMappingURL=markdownUtils.d.ts.map
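A hedged sketch of the full-round parse, using the exported DEFAULT_PARSE_FRONT_MATTER as the front matter parser; the file path and content are hypothetical.

const {parseMarkdownFile, DEFAULT_PARSE_FRONT_MATTER} = require('@docusaurus/utils');

const fileContent = [
  '---',
  'description: A short page',
  '---',
  '',
  '# Hello `world`',
  '',
  'First paragraph becomes the excerpt.',
].join('\n');

parseMarkdownFile({
  filePath: '/my-site/docs/hello.md', // hypothetical; forwarded to the front matter parser
  fileContent,
  parseFrontMatter: DEFAULT_PARSE_FRONT_MATTER,
  removeContentTitle: true,
}).then(({frontMatter, contentTitle, excerpt, content}) => {
  // frontMatter  -> {description: 'A short page'}
  // contentTitle -> 'Hello world' (inline code unwrapped)
  // excerpt      -> 'First paragraph becomes the excerpt.'
  // content      -> the body without front matter and without the h1 title
});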

1
node_modules/@docusaurus/utils/lib/markdownUtils.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"markdownUtils.d.ts","sourceRoot":"","sources":["../src/markdownUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,OAAO,EAA8B,KAAK,cAAc,EAAC,MAAM,WAAW,CAAC;AAC3E,OAAO,KAAK,EACV,gBAAgB,EAChB,uBAAuB,EACxB,MAAM,mBAAmB,CAAC;AAM3B;;;;;;GAMG;AACH,wBAAgB,sBAAsB,CAAC,OAAO,EAAE,MAAM,GAAG;IACvD;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb,0CAA0C;IAC1C,EAAE,EAAE,MAAM,GAAG,SAAS,CAAC;CACxB,CAUA;AAED;;;GAGG;AACH,wBAAgB,wBAAwB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,CAShE;AAED;;;;;;;;GAQG;AACH,wBAAgB,mBAAmB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,CAc3D;AAED;;;;;;;;GAQG;AACH,wBAAgB,+BAA+B,CAC7C,OAAO,EAAE,MAAM,EACf,6BAA6B,EAAE,MAAM,EAAE,GACtC,MAAM,CAiBR;AAID;;;;;;;;;GASG;AACH,wBAAgB,aAAa,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CA8EpE;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,2BAA2B,CAAC,WAAW,EAAE,MAAM,GAAG;IAChE,6CAA6C;IAC7C,WAAW,EAAE;QAAC,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAA;KAAC,CAAC;IACtC,sCAAsC;IACtC,OAAO,EAAE,MAAM,CAAC;CACjB,CAuBA;AAED,eAAO,MAAM,0BAA0B,EAAE,uBAEW,CAAC;AAMrD,KAAK,gCAAgC,GAAG;IACtC;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;CAC9B,CAAC;AAEF;;;;;;;;;;GAUG;AACH,wBAAgB,yBAAyB,CACvC,gBAAgB,EAAE,MAAM,EACxB,OAAO,CAAC,EAAE,gCAAgC,GACzC;IACD,yDAAyD;IACzD,OAAO,EAAE,MAAM,CAAC;IAChB,8CAA8C;IAC9C,YAAY,EAAE,MAAM,GAAG,SAAS,CAAC;CAClC,CAyCA;AAED;;;;;GAKG;AACH,wBAAsB,iBAAiB,CAAC,EACtC,QAAQ,EACR,WAAW,EACX,gBAAgB,EAChB,kBAAkB,GACnB,EAAE;IACD,QAAQ,EAAE,MAAM,CAAC;IACjB,WAAW,EAAE,MAAM,CAAC;IACpB,gBAAgB,EAAE,gBAAgB,CAAC;CACpC,GAAG,gCAAgC,GAAG,OAAO,CAAC;IAC7C,oCAAoC;IACpC,WAAW,EAAE;QAAC,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAA;KAAC,CAAC;IACtC,6CAA6C;IAC7C,YAAY,EAAE,MAAM,GAAG,SAAS,CAAC;IACjC,iCAAiC;IACjC,OAAO,EAAE,MAAM,GAAG,SAAS,CAAC;IAC5B;;;OAGG;IACH,OAAO,EAAE,MAAM,CAAC;CACjB,CAAC,CA2BD;AA4BD,MAAM,MAAM,qBAAqB,GAAG,cAAc,GAAG;IACnD,sCAAsC;IACtC,SAAS,CAAC,EAAE,OAAO,CAAC;CACrB,CAAC;AAEF;;;;GAIG;AACH,wBAAgB,sBAAsB,CACpC,OAAO,EAAE,MAAM,EACf,OAAO,GAAE,qBAA+D,GACvE,MAAM,CAoCR"}

345
node_modules/@docusaurus/utils/lib/markdownUtils.js generated vendored Normal file
View File

@@ -0,0 +1,345 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.writeMarkdownHeadingId = exports.parseMarkdownFile = exports.parseMarkdownContentTitle = exports.DEFAULT_PARSE_FRONT_MATTER = exports.parseFileContentFrontMatter = exports.createExcerpt = exports.admonitionTitleToDirectiveLabel = exports.unwrapMdxCodeBlocks = exports.escapeMarkdownHeadingIds = exports.parseMarkdownHeadingId = void 0;
const tslib_1 = require("tslib");
const logger_1 = tslib_1.__importDefault(require("@docusaurus/logger"));
const gray_matter_1 = tslib_1.__importDefault(require("gray-matter"));
const slugger_1 = require("./slugger");
// Some utilities for parsing Markdown content. These things are only used on
// server-side when we infer metadata like `title` and `description` from the
// content. Most parsing is still done in MDX through the mdx-loader.
/**
* Parses custom ID from a heading. The ID can contain any characters except
* `{#` and `}`.
*
* @param heading e.g. `## Some heading {#some-heading}` where the last
* character must be `}` for the ID to be recognized
*/
function parseMarkdownHeadingId(heading) {
const customHeadingIdRegex = /\s*\{#(?<id>(?:.(?!\{#|\}))*.)\}$/;
const matches = customHeadingIdRegex.exec(heading);
if (matches) {
return {
text: heading.replace(matches[0], ''),
id: matches.groups.id,
};
}
return { text: heading, id: undefined };
}
exports.parseMarkdownHeadingId = parseMarkdownHeadingId;
/**
 * MDX 2 requires escaping { with a \ so our anchor syntax needs that now.
* See https://mdxjs.com/docs/troubleshooting-mdx/#could-not-parse-expression-with-acorn-error
*/
function escapeMarkdownHeadingIds(content) {
const markdownHeadingRegexp = /(?:^|\n)#{1,6}(?!#).*/g;
return content.replaceAll(markdownHeadingRegexp, (substring) =>
// TODO probably not the most efficient impl...
substring
.replace('{#', '\\{#')
// prevent duplicate escaping
.replace('\\\\{#', '\\{#'));
}
exports.escapeMarkdownHeadingIds = escapeMarkdownHeadingIds;
/**
* Hacky temporary escape hatch for Crowdin bad MDX support
* See https://docusaurus.io/docs/i18n/crowdin#mdx
*
* TODO Titus suggested a clean solution based on ```mdx eval and Remark
* See https://github.com/mdx-js/mdx/issues/701#issuecomment-947030041
*
* @param content
*/
function unwrapMdxCodeBlocks(content) {
// We only support 3/4 backticks on purpose, should be good enough
const regexp3 = /(?<begin>^|\n)```(?<spaces>\x20*)mdx-code-block\n(?<children>.*?)\n```(?<end>\n|$)/gs;
const regexp4 = /(?<begin>^|\n)````(?<spaces>\x20*)mdx-code-block\n(?<children>.*?)\n````(?<end>\n|$)/gs;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const replacer = (substring, ...args) => {
const groups = args.at(-1);
return `${groups.begin}${groups.children}${groups.end}`;
};
return content.replaceAll(regexp3, replacer).replaceAll(regexp4, replacer);
}
exports.unwrapMdxCodeBlocks = unwrapMdxCodeBlocks;
/**
* Add support for our legacy ":::note Title" admonition syntax
* Not supported by https://github.com/remarkjs/remark-directive
* Syntax is transformed to ":::note[Title]" (container directive label)
* See https://talk.commonmark.org/t/generic-directives-plugins-syntax/444
*
* @param content
* @param admonitionContainerDirectives
*/
function admonitionTitleToDirectiveLabel(content, admonitionContainerDirectives) {
// this will also process ":::note Title" inside docs code blocks
// good enough: we fixed older versions' docs to not be affected
const directiveNameGroup = `(${admonitionContainerDirectives.join('|')})`;
const regexp = new RegExp(`^(?<quote>(> ?)*)(?<indentation>( +|\t+))?(?<directive>:{3,}${directiveNameGroup}) +(?<title>.*)$`, 'gm');
return content.replaceAll(regexp, (substring, ...args) => {
const groups = args.at(-1);
return `${groups.quote ?? ''}${groups.indentation ?? ''}${groups.directive}[${groups.title}]`;
});
}
exports.admonitionTitleToDirectiveLabel = admonitionTitleToDirectiveLabel;
// TODO: Find a better way to do so, possibly by compiling the Markdown content,
// stripping out HTML tags and obtaining the first line.
/**
* Creates an excerpt of a Markdown file. This function will:
*
* - Ignore h1 headings (setext or atx)
* - Ignore import/export
* - Ignore code blocks
*
* And for the first contentful line, it will strip away most Markdown
* syntax, including HTML tags, emphasis, links (keeping the text), etc.
*/
function createExcerpt(fileString) {
const fileLines = fileString
.trimStart()
// Remove Markdown alternate title
.replace(/^[^\r\n]*\r?\n[=]+/g, '')
.split(/\r?\n/);
let inCode = false;
let inImport = false;
let lastCodeFence = '';
for (const fileLine of fileLines) {
// An empty line marks the end of imports
if (!fileLine.trim() && inImport) {
inImport = false;
}
// Skip empty line.
if (!fileLine.trim()) {
continue;
}
// Skip import/export declaration.
if ((/^(?:import|export)\s.*/.test(fileLine) || inImport) && !inCode) {
inImport = true;
continue;
}
// Skip code block line.
if (fileLine.trim().startsWith('```')) {
const codeFence = fileLine.trim().match(/^`+/)[0];
if (!inCode) {
inCode = true;
lastCodeFence = codeFence;
// If we are in a ````-fenced block, all ``` would be plain text instead
// of fences
}
else if (codeFence.length >= lastCodeFence.length) {
inCode = false;
}
continue;
}
else if (inCode) {
continue;
}
const cleanedLine = fileLine
// Remove HTML tags.
.replace(/<[^>]*>/g, '')
// Remove Title headers
.replace(/^#[^#]+#?/gm, '')
// Remove Markdown + ATX-style headers
.replace(/^#{1,6}\s*(?<text>[^#]*?)\s*#{0,6}/gm, '$1')
// Remove emphasis.
.replace(/(?<opening>[*_]{1,3})(?<text>.*?)\1/g, '$2')
// Remove strikethroughs.
.replace(/~~(?<text>\S.*\S)~~/g, '$1')
// Remove images.
.replace(/!\[(?<alt>.*?)\][[(].*?[\])]/g, '$1')
// Remove footnotes.
.replace(/\[\^.+?\](?:: .*$)?/g, '')
// Remove inline links.
.replace(/\[(?<alt>.*?)\][[(].*?[\])]/g, '$1')
// Remove inline code.
.replace(/`(?<text>.+?)`/g, '$1')
// Remove blockquotes.
.replace(/^\s{0,3}>\s?/g, '')
// Remove admonition definition.
.replace(/:::.*/, '')
// Remove Emoji names within colons, including preceding whitespace.
.replace(/\s?:(?:::|[^:\n])+:/g, '')
// Remove custom Markdown heading id.
.replace(/\{#*[\w-]+\}/, '')
.trim();
if (cleanedLine) {
return cleanedLine;
}
}
return undefined;
}
exports.createExcerpt = createExcerpt;
/**
* Takes a raw Markdown file content, and parses the front matter using
* gray-matter. Worth noting that gray-matter accepts TOML and other markup
* languages as well.
*
* @throws Throws when gray-matter throws. e.g.:
* ```md
* ---
* foo: : bar
* ---
* ```
*/
function parseFileContentFrontMatter(fileContent) {
// TODO Docusaurus v4: replace gray-matter by a better lib
// gray-matter is unmaintained, not flexible, and the code doesn't look good
const { data, content } = (0, gray_matter_1.default)(fileContent);
// gray-matter has an undocumented front matter caching behavior
// https://github.com/jonschlinkert/gray-matter/blob/ce67a86dba419381db0dd01cc84e2d30a1d1e6a5/index.js#L39
// Unfortunately, this becomes a problem when we mutate returned front matter
// We want to make it possible as part of the parseFrontMatter API
// So we make it safe to mutate by always providing a deep copy
const frontMatter =
// And of course structuredClone() doesn't work well with Date in Jest...
// See https://github.com/jestjs/jest/issues/2549
// So we parse again for tests with a {} option object
// This undocumented empty option object disables gray-matter caching.
process.env.JEST_WORKER_ID
? (0, gray_matter_1.default)(fileContent, {}).data
: structuredClone(data);
return {
frontMatter,
content: content.trim(),
};
}
exports.parseFileContentFrontMatter = parseFileContentFrontMatter;
const DEFAULT_PARSE_FRONT_MATTER = async (params) => parseFileContentFrontMatter(params.fileContent);
exports.DEFAULT_PARSE_FRONT_MATTER = DEFAULT_PARSE_FRONT_MATTER;
function toTextContentTitle(contentTitle) {
return contentTitle.replace(/`(?<text>[^`]*)`/g, '$<text>');
}
/**
* Takes the raw Markdown content, without front matter, and tries to find an h1
* title (setext or atx) to be used as metadata.
*
* It only searches until the first contentful paragraph, ignoring import/export
* declarations.
*
 * It will try to convert markdown to reasonable text, but it isn't best-effort,
* since it's only used as a fallback when `frontMatter.title` is not provided.
* For now, we just unwrap inline code (``# `config.js` `` => `config.js`).
*/
function parseMarkdownContentTitle(contentUntrimmed, options) {
const removeContentTitleOption = options?.removeContentTitle ?? false;
const content = contentUntrimmed.trim();
// We only need to detect import statements that will be parsed by MDX as
// `import` nodes, as broken syntax can't render anyways. That means any block
// that has `import` at the very beginning and surrounded by empty lines.
const contentWithoutImport = content
.replace(/^(?:import\s(?:.|\r?\n(?!\r?\n))*(?:\r?\n){2,})*/, '')
.trim();
const regularTitleMatch = /^#[ \t]+(?<title>[^ \t].*)(?:\r?\n|$)/.exec(contentWithoutImport);
const alternateTitleMatch = /^(?<title>.*)\r?\n=+(?:\r?\n|$)/.exec(contentWithoutImport);
const titleMatch = regularTitleMatch ?? alternateTitleMatch;
if (!titleMatch) {
return { content, contentTitle: undefined };
}
const newContent = removeContentTitleOption
? content.replace(titleMatch[0], '')
: content;
if (regularTitleMatch) {
return {
content: newContent.trim(),
contentTitle: toTextContentTitle(regularTitleMatch
.groups.title.trim()
.replace(/\s*(?:\{#*[\w-]+\}|#+)$/, '')).trim(),
};
}
return {
content: newContent.trim(),
contentTitle: toTextContentTitle(alternateTitleMatch.groups.title.trim().replace(/\s*=+$/, '')).trim(),
};
}
exports.parseMarkdownContentTitle = parseMarkdownContentTitle;
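// Illustrative usage sketch (the Markdown string is hypothetical):
parseMarkdownContentTitle('# Hello World\n\nSome intro.');
// => {content: '# Hello World\n\nSome intro.', contentTitle: 'Hello World'}
parseMarkdownContentTitle('# Hello World\n\nSome intro.', {removeContentTitle: true});
// => {content: 'Some intro.', contentTitle: 'Hello World'}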
/**
* Makes a full-round parse.
*
* @throws Throws when `parseFrontMatter` throws, usually because of invalid
* syntax.
*/
async function parseMarkdownFile({ filePath, fileContent, parseFrontMatter, removeContentTitle, }) {
try {
const { frontMatter, content: contentWithoutFrontMatter } = await parseFrontMatter({
filePath,
fileContent,
defaultParseFrontMatter: exports.DEFAULT_PARSE_FRONT_MATTER,
});
const { content, contentTitle } = parseMarkdownContentTitle(contentWithoutFrontMatter, { removeContentTitle });
const excerpt = createExcerpt(content);
return {
frontMatter,
content,
contentTitle,
excerpt,
};
}
catch (err) {
logger_1.default.error(`Error while parsing Markdown front matter.
This can happen if you use special characters in front matter values (try using double quotes around that value).`);
throw err;
}
}
exports.parseMarkdownFile = parseMarkdownFile;
function unwrapMarkdownLinks(line) {
return line.replace(/\[(?<alt>[^\]]+)\]\([^)]+\)/g, (match, p1) => p1);
}
function addHeadingId(line, slugger, maintainCase) {
let headingLevel = 0;
while (line.charAt(headingLevel) === '#') {
headingLevel += 1;
}
const headingText = line.slice(headingLevel).trimEnd();
const headingHashes = line.slice(0, headingLevel);
const slug = slugger.slug(unwrapMarkdownLinks(headingText).trim(), {
maintainCase,
});
return `${headingHashes}${headingText} {#${slug}}`;
}
/**
* Takes Markdown content, returns new content with heading IDs written.
* Respects existing IDs (unless `overwrite=true`) and never generates colliding
* IDs (through the slugger).
*/
function writeMarkdownHeadingId(content, options = { maintainCase: false, overwrite: false }) {
const { maintainCase = false, overwrite = false } = options;
const lines = content.split('\n');
const slugger = (0, slugger_1.createSlugger)();
// If we can't overwrite existing slugs, make sure other headings don't
// generate colliding slugs by first marking these slugs as occupied
if (!overwrite) {
lines.forEach((line) => {
const parsedHeading = parseMarkdownHeadingId(line);
if (parsedHeading.id) {
slugger.slug(parsedHeading.id);
}
});
}
let inCode = false;
return lines
.map((line) => {
if (line.startsWith('```')) {
inCode = !inCode;
return line;
}
// Ignore h1 headings, as we don't create anchor links for those
if (inCode || !line.startsWith('##')) {
return line;
}
const parsedHeading = parseMarkdownHeadingId(line);
// Do not process if id is already there
if (parsedHeading.id && !overwrite) {
return line;
}
return addHeadingId(parsedHeading.text, slugger, maintainCase);
})
.join('\n');
}
exports.writeMarkdownHeadingId = writeMarkdownHeadingId;
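// Illustrative usage sketch (the Markdown string is hypothetical). The slugger
// keeps generated IDs unique across the document:
writeMarkdownHeadingId('## Getting Started\n\n## Getting Started');
// => '## Getting Started {#getting-started}\n\n## Getting Started {#getting-started-1}'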
//# sourceMappingURL=markdownUtils.js.map

File diff suppressed because one or more lines are too long

8
node_modules/@docusaurus/utils/lib/moduleUtils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,8 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
export declare function loadFreshModule(modulePath: string): Promise<unknown>;
//# sourceMappingURL=moduleUtils.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"moduleUtils.d.ts","sourceRoot":"","sources":["../src/moduleUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAQH,wBAAsB,eAAe,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CA6B1E"}

40
node_modules/@docusaurus/utils/lib/moduleUtils.js generated vendored Normal file
View File

@@ -0,0 +1,40 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.loadFreshModule = void 0;
const tslib_1 = require("tslib");
const jiti_1 = tslib_1.__importDefault(require("jiti"));
const logger_1 = tslib_1.__importDefault(require("@docusaurus/logger"));
/*
jiti is able to load ESM, CJS, JSON, TS modules
*/
async function loadFreshModule(modulePath) {
try {
if (typeof modulePath !== 'string') {
throw new Error(logger_1.default.interpolate `Invalid module path of type name=${modulePath}`);
}
const load = (0, jiti_1.default)(__filename, {
// Transpilation cache, can be safely enabled
cache: true,
// Bypass Node.js runtime require cache
// Same as "import-fresh" package we used previously
requireCache: false,
// Only take into consideration the default export
// For now we don't need named exports
// This also helps normalize return value for both CJS/ESM/TS modules
interopDefault: true,
// debug: true,
});
return load(modulePath);
}
catch (error) {
throw new Error(logger_1.default.interpolate `Docusaurus could not load module at path path=${modulePath}\nCause: ${error.message}`, { cause: error });
}
}
exports.loadFreshModule = loadFreshModule;
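// Illustrative usage sketch (the config path is hypothetical). Because jiti is
// configured with requireCache: false, repeated calls re-read the file, which is
// useful when a config must be reloaded without restarting the process:
async function exampleLoadSiteConfig() {
  const siteConfig = await loadFreshModule('/my-site/docusaurus.config.js');
  return siteConfig;
}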
//# sourceMappingURL=moduleUtils.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"moduleUtils.js","sourceRoot":"","sources":["../src/moduleUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,wDAAwB;AACxB,wEAAwC;AAExC;;GAEG;AACI,KAAK,UAAU,eAAe,CAAC,UAAkB;IACtD,IAAI;QACF,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;YAClC,MAAM,IAAI,KAAK,CACb,gBAAM,CAAC,WAAW,CAAA,oCAAoC,UAAU,EAAE,CACnE,CAAC;SACH;QACD,MAAM,IAAI,GAAG,IAAA,cAAI,EAAC,UAAU,EAAE;YAC5B,6CAA6C;YAC7C,KAAK,EAAE,IAAI;YACX,uCAAuC;YACvC,oDAAoD;YACpD,YAAY,EAAE,KAAK;YACnB,kDAAkD;YAClD,sCAAsC;YACtC,qEAAqE;YACrE,cAAc,EAAE,IAAI;YACpB,eAAe;SAChB,CAAC,CAAC;QAEH,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC;KACzB;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,IAAI,KAAK,CACb,gBAAM,CAAC,WAAW,CAAA,iDAAiD,UAAU,YAC1E,KAAe,CAAC,OACnB,EAAE,EACF,EAAC,KAAK,EAAE,KAAK,EAAC,CACf,CAAC;KACH;AACH,CAAC;AA7BD,0CA6BC"}

56
node_modules/@docusaurus/utils/lib/pathUtils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,56 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
export declare const isNameTooLong: (str: string) => boolean;
export declare function shortName(str: string): string;
/**
* Convert Windows backslash paths to posix style paths.
* E.g: endi\lie -> endi/lie
*
* Returns original path if the posix counterpart is not valid Windows path.
* This makes the legacy code that uses posixPath safe; but also makes it less
* useful when you actually want a path with forward slashes (e.g. for URL)
*
* Adopted from https://github.com/sindresorhus/slash/blob/main/index.js
*/
export declare function posixPath(str: string): string;
/**
* When you want to display a path in a message/warning/error, it's more
* convenient to:
*
* - make it relative to `cwd()`
* - convert to posix (ie not using windows \ path separator)
*
* This way, Jest tests can run more reliably on any computer/CI on both
* Unix/Windows
* For Windows users this is not perfect (as they see / instead of \) but it's
* probably good enough
*/
export declare function toMessageRelativeFilePath(filePath: string): string;
/**
* Alias filepath relative to site directory, very useful so that we
* don't expose user's site structure.
* Example: some/path/to/website/docs/foo.md -> @site/docs/foo.md
*/
export declare function aliasedSitePath(filePath: string, siteDir: string): string;
/**
* When you have a path like C:\X\Y
* It is not safe to use directly when generating code
* For example, this would fail due to unescaped \:
* `<img src={require("${filePath}")} />`
* But this would work: `<img src={require("${escapePath(filePath)}")} />`
*
* posixPath can't be used in all cases, because forward slashes are only valid
* Windows paths when they don't contain non-ascii characters, and posixPath
* doesn't escape those that fail to be converted.
*
* This function escapes double quotes but not single quotes (because it uses
* `JSON.stringify`). Therefore, you must put the escaped path inside double
* quotes when generating code.
*/
export declare function escapePath(str: string): string;
export declare function addTrailingPathSeparator(str: string): string;
//# sourceMappingURL=pathUtils.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"pathUtils.d.ts","sourceRoot":"","sources":["../src/pathUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAeH,eAAO,MAAM,aAAa,QAAS,MAAM,KAAG,OAMgC,CAAC;AAE7E,wBAAgB,SAAS,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAiB7C;AAED;;;;;;;;;GASG;AACH,wBAAgB,SAAS,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAO7C;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,yBAAyB,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,CAElE;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,MAAM,CAKzE;AAED;;;;;;;;;;;;;;GAcG;AACH,wBAAgB,UAAU,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAK9C;AAED,wBAAgB,wBAAwB,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAK5D"}

115
node_modules/@docusaurus/utils/lib/pathUtils.js generated vendored Normal file
View File

@@ -0,0 +1,115 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.addTrailingPathSeparator = exports.escapePath = exports.aliasedSitePath = exports.toMessageRelativeFilePath = exports.posixPath = exports.shortName = exports.isNameTooLong = void 0;
const tslib_1 = require("tslib");
const path_1 = tslib_1.__importDefault(require("path"));
// Based on https://github.com/gatsbyjs/gatsby/pull/21518/files
// macOS (APFS) and Windows (NTFS) filename length limit = 255 chars,
// Others = 255 bytes
const MAX_PATH_SEGMENT_CHARS = 255;
const MAX_PATH_SEGMENT_BYTES = 255;
// Space for appending things to the string like file extensions and so on
const SPACE_FOR_APPENDING = 10;
const isMacOs = () => process.platform === 'darwin';
const isWindows = () => process.platform === 'win32';
const isNameTooLong = (str) =>
// Not entirely correct: we can't assume FS from OS. But good enough?
isMacOs() || isWindows()
? // Windows (NTFS) and macOS (APFS) filename length limit (255 chars)
str.length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_CHARS
: // Other (255 bytes)
Buffer.from(str).length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_BYTES;
exports.isNameTooLong = isNameTooLong;
function shortName(str) {
if (isMacOs() || isWindows()) {
const overflowingChars = str.length - MAX_PATH_SEGMENT_CHARS;
return str.slice(0, str.length - overflowingChars - SPACE_FOR_APPENDING - 1);
}
const strBuffer = Buffer.from(str);
const overflowingBytes = Buffer.byteLength(strBuffer) - MAX_PATH_SEGMENT_BYTES;
return strBuffer
.slice(0, Buffer.byteLength(strBuffer) - overflowingBytes - SPACE_FOR_APPENDING - 1)
.toString();
}
exports.shortName = shortName;
/**
* Convert Windows backslash paths to posix style paths.
* E.g: endi\lie -> endi/lie
*
* Returns original path if the posix counterpart is not valid Windows path.
* This makes the legacy code that uses posixPath safe; but also makes it less
* useful when you actually want a path with forward slashes (e.g. for URL)
*
* Adopted from https://github.com/sindresorhus/slash/blob/main/index.js
*/
function posixPath(str) {
const isExtendedLengthPath = str.startsWith('\\\\?\\');
if (isExtendedLengthPath) {
return str;
}
return str.replace(/\\/g, '/');
}
exports.posixPath = posixPath;
/**
* When you want to display a path in a message/warning/error, it's more
* convenient to:
*
* - make it relative to `cwd()`
* - convert to posix (ie not using windows \ path separator)
*
* This way, Jest tests can run more reliably on any computer/CI on both
* Unix/Windows
* For Windows users this is not perfect (as they see / instead of \) but it's
* probably good enough
*/
function toMessageRelativeFilePath(filePath) {
return posixPath(path_1.default.relative(process.cwd(), filePath));
}
exports.toMessageRelativeFilePath = toMessageRelativeFilePath;
/**
* Alias filepath relative to site directory, very useful so that we
* don't expose user's site structure.
* Example: some/path/to/website/docs/foo.md -> @site/docs/foo.md
*/
function aliasedSitePath(filePath, siteDir) {
const relativePath = posixPath(path_1.default.relative(siteDir, filePath));
// Cannot use path.join() as it resolves '../' and removes
// the '@site'. Let webpack loader resolve it.
return `@site/${relativePath}`;
}
exports.aliasedSitePath = aliasedSitePath;
/**
* When you have a path like C:\X\Y
* It is not safe to use directly when generating code
* For example, this would fail due to unescaped \:
* `<img src={require("${filePath}")} />`
* But this would work: `<img src={require("${escapePath(filePath)}")} />`
*
* posixPath can't be used in all cases, because forward slashes are only valid
* Windows paths when they don't contain non-ascii characters, and posixPath
* doesn't escape those that fail to be converted.
*
* This function escapes double quotes but not single quotes (because it uses
* `JSON.stringify`). Therefore, you must put the escaped path inside double
* quotes when generating code.
*/
function escapePath(str) {
const escaped = JSON.stringify(str);
// Remove the " around the json string;
return escaped.substring(1, escaped.length - 1);
}
exports.escapePath = escapePath;
function addTrailingPathSeparator(str) {
return str.endsWith(path_1.default.sep)
? str
: // If this is Windows, we need to change the forward slash to backward
`${str.replace(/[\\/]$/, '')}${path_1.default.sep}`;
}
exports.addTrailingPathSeparator = addTrailingPathSeparator;
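// Illustrative usage sketches (all paths are hypothetical):
posixPath('endi\\lie'); // => 'endi/lie'
aliasedSitePath('/my-site/docs/intro.md', '/my-site'); // => '@site/docs/intro.md'
escapePath('C:\\Users\\site'); // doubles the backslashes so the value can be embedded in generated code
addTrailingPathSeparator('/my-site/docs'); // => '/my-site/docs/' (uses the platform's path separator)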
//# sourceMappingURL=pathUtils.js.map

1
node_modules/@docusaurus/utils/lib/pathUtils.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"pathUtils.js","sourceRoot":"","sources":["../src/pathUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,wDAAwB;AAExB,+DAA+D;AAC/D,qEAAqE;AACrE,qBAAqB;AACrB,MAAM,sBAAsB,GAAG,GAAG,CAAC;AACnC,MAAM,sBAAsB,GAAG,GAAG,CAAC;AACnC,0EAA0E;AAC1E,MAAM,mBAAmB,GAAG,EAAE,CAAC;AAE/B,MAAM,OAAO,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,QAAQ,KAAK,QAAQ,CAAC;AACpD,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAC;AAE9C,MAAM,aAAa,GAAG,CAAC,GAAW,EAAW,EAAE;AACpD,qEAAqE;AACrE,OAAO,EAAE,IAAI,SAAS,EAAE;IACtB,CAAC,CAAC,oEAAoE;QACpE,GAAG,CAAC,MAAM,GAAG,mBAAmB,GAAG,sBAAsB;IAC3D,CAAC,CAAC,oBAAoB;QACpB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,mBAAmB,GAAG,sBAAsB,CAAC;AANhE,QAAA,aAAa,iBAMmD;AAE7E,SAAgB,SAAS,CAAC,GAAW;IACnC,IAAI,OAAO,EAAE,IAAI,SAAS,EAAE,EAAE;QAC5B,MAAM,gBAAgB,GAAG,GAAG,CAAC,MAAM,GAAG,sBAAsB,CAAC;QAC7D,OAAO,GAAG,CAAC,KAAK,CACd,CAAC,EACD,GAAG,CAAC,MAAM,GAAG,gBAAgB,GAAG,mBAAmB,GAAG,CAAC,CACxD,CAAC;KACH;IACD,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACnC,MAAM,gBAAgB,GACpB,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,GAAG,sBAAsB,CAAC;IACxD,OAAO,SAAS;SACb,KAAK,CACJ,CAAC,EACD,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,GAAG,gBAAgB,GAAG,mBAAmB,GAAG,CAAC,CAC1E;SACA,QAAQ,EAAE,CAAC;AAChB,CAAC;AAjBD,8BAiBC;AAED;;;;;;;;;GASG;AACH,SAAgB,SAAS,CAAC,GAAW;IACnC,MAAM,oBAAoB,GAAG,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IAEvD,IAAI,oBAAoB,EAAE;QACxB,OAAO,GAAG,CAAC;KACZ;IACD,OAAO,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AACjC,CAAC;AAPD,8BAOC;AAED;;;;;;;;;;;GAWG;AACH,SAAgB,yBAAyB,CAAC,QAAgB;IACxD,OAAO,SAAS,CAAC,cAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,QAAQ,CAAC,CAAC,CAAC;AAC3D,CAAC;AAFD,8DAEC;AAED;;;;GAIG;AACH,SAAgB,eAAe,CAAC,QAAgB,EAAE,OAAe;IAC/D,MAAM,YAAY,GAAG,SAAS,CAAC,cAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC;IACjE,0DAA0D;IAC1D,8CAA8C;IAC9C,OAAO,SAAS,YAAY,EAAE,CAAC;AACjC,CAAC;AALD,0CAKC;AAED;;;;;;;;;;;;;;GAcG;AACH,SAAgB,UAAU,CAAC,GAAW;IACpC,MAAM,OAAO,GAAG,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IAEpC,uCAAuC;IACvC,OAAO,OAAO,CAAC,SAAS,CAAC,CAAC,EAAE,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;AAClD,CAAC;AALD,gCAKC;AAED,SAAgB,wBAAwB,CAAC,GAAW;IAClD,OAAO,GAAG,CAAC,QAAQ,CAAC,cAAI,CAAC,GAAG,CAAC;QAC3B,CAAC,CAAC,GAAG;QACL,CAAC,CAAC,sEAAsE;YACtE,GAAG,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,GAAG,cAAI,CAAC,GAAG,EAAE,CAAC;AAChD,CAAC;AALD,4DAKC"}

8
node_modules/@docusaurus/utils/lib/regExpUtils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,8 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
export declare function escapeRegexp(string: string): string;
//# sourceMappingURL=regExpUtils.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"regExpUtils.d.ts","sourceRoot":"","sources":["../src/regExpUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,wBAAgB,YAAY,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,CAEnD"}

16
node_modules/@docusaurus/utils/lib/regExpUtils.js generated vendored Normal file
View File

@@ -0,0 +1,16 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.escapeRegexp = void 0;
const tslib_1 = require("tslib");
const escape_string_regexp_1 = tslib_1.__importDefault(require("escape-string-regexp"));
function escapeRegexp(string) {
return (0, escape_string_regexp_1.default)(string);
}
exports.escapeRegexp = escapeRegexp;
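// Illustrative usage sketch (the pattern is hypothetical):
escapeRegexp('docs/*.md'); // => 'docs/\\*\\.md', safe to embed in new RegExp(...)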
//# sourceMappingURL=regExpUtils.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"regExpUtils.js","sourceRoot":"","sources":["../src/regExpUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,wFAAsD;AAEtD,SAAgB,YAAY,CAAC,MAAc;IACzC,OAAO,IAAA,8BAAkB,EAAC,MAAM,CAAC,CAAC;AACpC,CAAC;AAFD,oCAEC"}

8
node_modules/@docusaurus/utils/lib/shellUtils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,8 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
export declare function escapeShellArg(s: string): string;
//# sourceMappingURL=shellUtils.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"shellUtils.d.ts","sourceRoot":"","sources":["../src/shellUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAQH,wBAAgB,cAAc,CAAC,CAAC,EAAE,MAAM,GAAG,MAAM,CAIhD"}

21
node_modules/@docusaurus/utils/lib/shellUtils.js generated vendored Normal file
View File

@@ -0,0 +1,21 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.escapeShellArg = void 0;
// TODO move from shelljs to execa later?
// Execa is well maintained and widely used
// Even shelljs recommends execa for security / escaping:
// https://github.com/shelljs/shelljs/wiki/Security-guidelines
// Inspired by https://github.com/xxorax/node-shell-escape/blob/master/shell-escape.js
function escapeShellArg(s) {
let res = `'${s.replace(/'/g, "'\\''")}'`;
res = res.replace(/^(?:'')+/g, '').replace(/\\'''/g, "\\'");
return res;
}
exports.escapeShellArg = escapeShellArg;
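// Illustrative usage sketch (the argument is hypothetical):
escapeShellArg("it's done"); // => "'it'\\''s done'", safe to interpolate into a shell command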
//# sourceMappingURL=shellUtils.js.map

1
node_modules/@docusaurus/utils/lib/shellUtils.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"shellUtils.js","sourceRoot":"","sources":["../src/shellUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;AAEH,yCAAyC;AACzC,2CAA2C;AAC3C,yDAAyD;AACzD,8DAA8D;AAE9D,sFAAsF;AACtF,SAAgB,cAAc,CAAC,CAAS;IACtC,IAAI,GAAG,GAAG,IAAI,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;IAC1C,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IAC5D,OAAO,GAAG,CAAC;AACb,CAAC;AAJD,wCAIC"}

24
node_modules/@docusaurus/utils/lib/slugger.d.ts generated vendored Normal file
View File

@@ -0,0 +1,24 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
export type SluggerOptions = {
/** Keep the headings' casing, otherwise make all lowercase. */
maintainCase?: boolean;
};
export type Slugger = {
/**
* Takes a Markdown heading like "Josh Cena" and sluggifies it according to
* GitHub semantics (in this case `josh-cena`). Stateful, because if you try
* to sluggify "Josh Cena" again it would return `josh-cena-1`.
*/
slug: (value: string, options?: SluggerOptions) => string;
};
/**
* A thin wrapper around github-slugger. This is a factory function that returns
* a stateful Slugger object.
*/
export declare function createSlugger(): Slugger;
//# sourceMappingURL=slugger.d.ts.map

1
node_modules/@docusaurus/utils/lib/slugger.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"slugger.d.ts","sourceRoot":"","sources":["../src/slugger.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAOH,MAAM,MAAM,cAAc,GAAG;IAC3B,+DAA+D;IAC/D,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB,CAAC;AAEF,MAAM,MAAM,OAAO,GAAG;IACpB;;;;OAIG;IACH,IAAI,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,cAAc,KAAK,MAAM,CAAC;CAC3D,CAAC;AAEF;;;GAGG;AACH,wBAAgB,aAAa,IAAI,OAAO,CAKvC"}

23
node_modules/@docusaurus/utils/lib/slugger.js generated vendored Normal file
View File

@@ -0,0 +1,23 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.createSlugger = void 0;
const tslib_1 = require("tslib");
const github_slugger_1 = tslib_1.__importDefault(require("github-slugger"));
/**
* A thin wrapper around github-slugger. This is a factory function that returns
* a stateful Slugger object.
*/
function createSlugger() {
const githubSlugger = new github_slugger_1.default();
return {
slug: (value, options) => githubSlugger.slug(value, options?.maintainCase),
};
}
exports.createSlugger = createSlugger;
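// Illustrative usage sketch:
const exampleSlugger = createSlugger();
exampleSlugger.slug('Josh Cena'); // => 'josh-cena'
exampleSlugger.slug('Josh Cena'); // => 'josh-cena-1' (stateful: repeated values get a numeric suffix)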
//# sourceMappingURL=slugger.js.map

1
node_modules/@docusaurus/utils/lib/slugger.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"slugger.js","sourceRoot":"","sources":["../src/slugger.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,4EAA2C;AAmB3C;;;GAGG;AACH,SAAgB,aAAa;IAC3B,MAAM,aAAa,GAAG,IAAI,wBAAa,EAAE,CAAC;IAC1C,OAAO;QACL,IAAI,EAAE,CAAC,KAAK,EAAE,OAAO,EAAE,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,EAAE,YAAY,CAAC;KAC3E,CAAC;AACJ,CAAC;AALD,sCAKC"}

73
node_modules/@docusaurus/utils/lib/tags.d.ts generated vendored Normal file
View File

@@ -0,0 +1,73 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/** What the user configures. */
export type Tag = {
label: string;
/** Permalink to this tag's page, without the `/tags/` base path. */
permalink: string;
};
/** What the tags list page should know about each tag. */
export type TagsListItem = Tag & {
/** Number of posts/docs with this tag. */
count: number;
};
/** What the tag's own page should know about the tag. */
export type TagModule = TagsListItem & {
/** The tags list page's permalink. */
allTagsPath: string;
/** Is this tag unlisted? (when it only contains unlisted items) */
unlisted: boolean;
};
export type FrontMatterTag = string | Tag;
/**
* Takes tag objects as they are defined in front matter, and normalizes each
* into a standard tag object. The permalink is created by appending the
* sluggified label to `tagsPath`. Front matter tags already containing
* permalinks would still have `tagsPath` prepended.
*
* The result will always be unique by permalinks. The behavior with colliding
* permalinks is undetermined.
*/
export declare function normalizeFrontMatterTags(
/** Base path to append the tag permalinks to. */
tagsPath: string,
/** Can be `undefined`, so that we can directly pipe in `frontMatter.tags`. */
frontMatterTags?: FrontMatterTag[] | undefined): Tag[];
type TaggedItemGroup<Item> = {
tag: Tag;
items: Item[];
};
/**
* Groups docs/blog posts by tag (provided by front matter).
*
* @returns a map from tag permalink to the items and other relevant tag data.
* The record is indexed by permalink, because routes must be unique in the end.
* Labels may vary across two MD files, but they are normalized. Docs with
* label='some label' and label='some-label' should end up on the same page.
*/
export declare function groupTaggedItems<Item>(items: readonly Item[],
/**
* A callback telling me how to get the tags list of the current item. Usually
* simply getting it from some metadata of the current item.
*/
getItemTags: (item: Item) => readonly Tag[]): {
[permalink: string]: TaggedItemGroup<Item>;
};
/**
* Computes the "tag visibility" (hard to find a better name):
* i.e. whether this tag is listed or unlisted,
* and which items should be listed when this tag is browsed.
*/
export declare function getTagVisibility<Item>({ items, isUnlisted, }: {
items: Item[];
isUnlisted: (item: Item) => boolean;
}): {
unlisted: boolean;
listedItems: Item[];
};
export {};
//# sourceMappingURL=tags.d.ts.map

1
node_modules/@docusaurus/utils/lib/tags.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"tags.d.ts","sourceRoot":"","sources":["../src/tags.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAKH,gCAAgC;AAChC,MAAM,MAAM,GAAG,GAAG;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,oEAAoE;IACpE,SAAS,EAAE,MAAM,CAAC;CACnB,CAAC;AAEF,0DAA0D;AAC1D,MAAM,MAAM,YAAY,GAAG,GAAG,GAAG;IAC/B,0CAA0C;IAC1C,KAAK,EAAE,MAAM,CAAC;CACf,CAAC;AAEF,yDAAyD;AACzD,MAAM,MAAM,SAAS,GAAG,YAAY,GAAG;IACrC,sCAAsC;IACtC,WAAW,EAAE,MAAM,CAAC;IACpB,mEAAmE;IACnE,QAAQ,EAAE,OAAO,CAAC;CACnB,CAAC;AAEF,MAAM,MAAM,cAAc,GAAG,MAAM,GAAG,GAAG,CAAC;AAgC1C;;;;;;;;GAQG;AACH,wBAAgB,wBAAwB;AACtC,iDAAiD;AACjD,QAAQ,EAAE,MAAM;AAChB,8EAA8E;AAC9E,eAAe,GAAE,cAAc,EAAE,GAAG,SAAc,GACjD,GAAG,EAAE,CAMP;AAED,KAAK,eAAe,CAAC,IAAI,IAAI;IAC3B,GAAG,EAAE,GAAG,CAAC;IACT,KAAK,EAAE,IAAI,EAAE,CAAC;CACf,CAAC;AAEF;;;;;;;GAOG;AACH,wBAAgB,gBAAgB,CAAC,IAAI,EACnC,KAAK,EAAE,SAAS,IAAI,EAAE;AACtB;;;GAGG;AACH,WAAW,EAAE,CAAC,IAAI,EAAE,IAAI,KAAK,SAAS,GAAG,EAAE,GAC1C;IAAC,CAAC,SAAS,EAAE,MAAM,GAAG,eAAe,CAAC,IAAI,CAAC,CAAA;CAAC,CA0B9C;AAED;;;;GAIG;AACH,wBAAgB,gBAAgB,CAAC,IAAI,EAAE,EACrC,KAAK,EACL,UAAU,GACX,EAAE;IACD,KAAK,EAAE,IAAI,EAAE,CAAC;IACd,UAAU,EAAE,CAAC,IAAI,EAAE,IAAI,KAAK,OAAO,CAAC;CACrC,GAAG;IACF,QAAQ,EAAE,OAAO,CAAC;IAClB,WAAW,EAAE,IAAI,EAAE,CAAC;CACrB,CAaA"}

111
node_modules/@docusaurus/utils/lib/tags.js generated vendored Normal file
View File

@@ -0,0 +1,111 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.getTagVisibility = exports.groupTaggedItems = exports.normalizeFrontMatterTags = void 0;
const tslib_1 = require("tslib");
const lodash_1 = tslib_1.__importDefault(require("lodash"));
const urlUtils_1 = require("./urlUtils");
function normalizeFrontMatterTag(tagsPath, frontMatterTag) {
function toTagObject(tagString) {
return {
label: tagString,
permalink: lodash_1.default.kebabCase(tagString),
};
}
// TODO maybe ensure the permalink is a valid url path?
function normalizeTagPermalink(permalink) {
// Note: we always apply tagsPath on purpose. For versioned docs, v1/doc.md
// and v2/doc.md tags with custom permalinks don't lead to the same created
// page. tagsPath is different for each doc version
return (0, urlUtils_1.normalizeUrl)([tagsPath, permalink]);
}
const tag = typeof frontMatterTag === 'string'
? toTagObject(frontMatterTag)
: frontMatterTag;
return {
label: tag.label,
permalink: normalizeTagPermalink(tag.permalink),
};
}
/**
* Takes tag objects as they are defined in front matter, and normalizes each
* into a standard tag object. The permalink is created by appending the
* sluggified label to `tagsPath`. Front matter tags already containing
* permalinks would still have `tagsPath` prepended.
*
* The result will always be unique by permalinks. The behavior with colliding
* permalinks is undetermined.
*/
function normalizeFrontMatterTags(
/** Base path to append the tag permalinks to. */
tagsPath,
/** Can be `undefined`, so that we can directly pipe in `frontMatter.tags`. */
frontMatterTags = []) {
const tags = frontMatterTags.map((tag) => normalizeFrontMatterTag(tagsPath, tag));
return lodash_1.default.uniqBy(tags, (tag) => tag.permalink);
}
exports.normalizeFrontMatterTags = normalizeFrontMatterTags;
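// Illustrative usage sketch (tagsPath and tags are hypothetical):
normalizeFrontMatterTags('/blog/tags', ['Hello World', {label: 'Releases', permalink: '/releases'}]);
// => [
//   {label: 'Hello World', permalink: '/blog/tags/hello-world'},
//   {label: 'Releases', permalink: '/blog/tags/releases'},
// ]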
/**
* Groups docs/blog posts by tag (provided by front matter).
*
* @returns a map from tag permalink to the items and other relevant tag data.
* The record is indexed by permalink, because routes must be unique in the end.
* Labels may vary across two MD files, but they are normalized. Docs with
* label='some label' and label='some-label' should end up on the same page.
*/
function groupTaggedItems(items,
/**
* A callback telling me how to get the tags list of the current item. Usually
* simply getting it from some metadata of the current item.
*/
getItemTags) {
const result = {};
items.forEach((item) => {
getItemTags(item).forEach((tag) => {
var _a;
// Init missing tag groups
// TODO: it's not really clear what should be the behavior if 2 tags have
// the same permalink but the label is different for each
// For now, the first tag found wins
result[_a = tag.permalink] ?? (result[_a] = {
tag,
items: [],
});
// Add item to group
result[tag.permalink].items.push(item);
});
});
// If a user adds the same tag twice to an MD doc (weird but possible),
// we don't want the item to appear twice in the list...
Object.values(result).forEach((group) => {
group.items = lodash_1.default.uniq(group.items);
});
return result;
}
exports.groupTaggedItems = groupTaggedItems;
/**
* Computes the "tag visibility" (hard to find a better name):
* i.e. whether this tag is listed or unlisted,
* and which items should be listed when this tag is browsed.
*/
function getTagVisibility({ items, isUnlisted, }) {
const allItemsUnlisted = items.every(isUnlisted);
// When a tag is full of unlisted items, we display all the items
// when tag is browsed, but we mark the tag as unlisted
if (allItemsUnlisted) {
return { unlisted: true, listedItems: items };
}
// When a tag has some listed items, the tag remains listed
// but we filter its unlisted items
return {
unlisted: false,
listedItems: items.filter((item) => !isUnlisted(item)),
};
}
exports.getTagVisibility = getTagVisibility;
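// Illustrative usage sketch (the items are hypothetical blog posts):
getTagVisibility({
  items: [{id: 'a', unlisted: true}, {id: 'b', unlisted: false}],
  isUnlisted: (post) => post.unlisted,
});
// => {unlisted: false, listedItems: [{id: 'b', unlisted: false}]}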
//# sourceMappingURL=tags.js.map

1
node_modules/@docusaurus/utils/lib/tags.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"tags.js","sourceRoot":"","sources":["../src/tags.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,4DAAuB;AACvB,yCAAwC;AAyBxC,SAAS,uBAAuB,CAC9B,QAAgB,EAChB,cAA8B;IAE9B,SAAS,WAAW,CAAC,SAAiB;QACpC,OAAO;YACL,KAAK,EAAE,SAAS;YAChB,SAAS,EAAE,gBAAC,CAAC,SAAS,CAAC,SAAS,CAAC;SAClC,CAAC;IACJ,CAAC;IAED,0DAA0D;IAC1D,SAAS,qBAAqB,CAAC,SAAiB;QAC9C,2EAA2E;QAC3E,2EAA2E;QAC3E,mDAAmD;QACnD,OAAO,IAAA,uBAAY,EAAC,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC,CAAC;IAC7C,CAAC;IAED,MAAM,GAAG,GACP,OAAO,cAAc,KAAK,QAAQ;QAChC,CAAC,CAAC,WAAW,CAAC,cAAc,CAAC;QAC7B,CAAC,CAAC,cAAc,CAAC;IAErB,OAAO;QACL,KAAK,EAAE,GAAG,CAAC,KAAK;QAChB,SAAS,EAAE,qBAAqB,CAAC,GAAG,CAAC,SAAS,CAAC;KAChD,CAAC;AACJ,CAAC;AAED;;;;;;;;GAQG;AACH,SAAgB,wBAAwB;AACtC,iDAAiD;AACjD,QAAgB;AAChB,8EAA8E;AAC9E,kBAAgD,EAAE;IAElD,MAAM,IAAI,GAAG,eAAe,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CACvC,uBAAuB,CAAC,QAAQ,EAAE,GAAG,CAAC,CACvC,CAAC;IAEF,OAAO,gBAAC,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;AAChD,CAAC;AAXD,4DAWC;AAOD;;;;;;;GAOG;AACH,SAAgB,gBAAgB,CAC9B,KAAsB;AACtB;;;GAGG;AACH,WAA2C;IAE3C,MAAM,MAAM,GAAiD,EAAE,CAAC;IAEhE,KAAK,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE;QACrB,WAAW,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,EAAE;;YAChC,0BAA0B;YAC1B,yEAAyE;YACzE,yDAAyD;YACzD,oCAAoC;YACpC,MAAM,MAAC,GAAG,CAAC,SAAS,MAApB,MAAM,OAAoB;gBACxB,GAAG;gBACH,KAAK,EAAE,EAAE;aACV,EAAC;YAEF,oBAAoB;YACpB,MAAM,CAAC,GAAG,CAAC,SAAS,CAAE,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC1C,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;IAEH,mEAAmE;IACnE,wDAAwD;IACxD,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,EAAE;QACtC,KAAK,CAAC,KAAK,GAAG,gBAAC,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IACpC,CAAC,CAAC,CAAC;IAEH,OAAO,MAAM,CAAC;AAChB,CAAC;AAjCD,4CAiCC;AAED;;;;GAIG;AACH,SAAgB,gBAAgB,CAAO,EACrC,KAAK,EACL,UAAU,GAIX;IAIC,MAAM,gBAAgB,GAAG,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;IACjD,iEAAiE;IACjE,uDAAuD;IACvD,IAAI,gBAAgB,EAAE;QACpB,OAAO,EAAC,QAAQ,EAAE,IAAI,EAAE,WAAW,EAAE,KAAK,EAAC,CAAC;KAC7C;IACD,2DAA2D;IAC3D,mCAAmC;IACnC,OAAO;QACL,QAAQ,EAAE,KAAK;QACf,WAAW,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;KACvD,CAAC;AACJ,CAAC;AAtBD,4CAsBC"}

73
node_modules/@docusaurus/utils/lib/urlUtils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,73 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/**
* Much like `path.join`, but much better. Takes an array of URL segments, and
* joins them into a reasonable URL.
*
* - `["file:", "/home", "/user/", "website"]` => `file:///home/user/website`
* - `["file://", "home", "/user/", "website"]` => `file://home/user/website` (relative!)
* - Remove trailing slash before parameters or hash.
* - Replace `?` in query parameters with `&`.
* - Dedupe forward slashes in the entire path, avoiding protocol slashes.
*
* @throws {TypeError} If any of the URL segment is not a string, this throws.
*/
export declare function normalizeUrl(rawUrls: string[]): string;
/**
* Takes a file's path, relative to its content folder, and computes its edit
* URL. If `editUrl` is `undefined`, this returns `undefined`, as is the case
* when the user doesn't want an edit URL in her config.
*/
export declare function getEditUrl(fileRelativePath: string, editUrl?: string): string | undefined;
/**
* Converts file path to a reasonable URL path, e.g. `'index.md'` -> `'/'`,
* `'foo/bar.js'` -> `'/foo/bar'`
*/
export declare function fileToPath(file: string): string;
/**
* Similar to `encodeURI`, but uses `encodeURIComponent` and assumes there's no
* query.
*
* `encodeURI("/question?/answer#section")` => `"/question?/answer#section"`;
* `encodePath("/question?/answer#section")` => `"/question%3F/answer%23section"`
*/
export declare function encodePath(userPath: string): string;
/**
* Whether `str` is a valid pathname. It must be absolute, and not contain
* special characters.
*/
export declare function isValidPathname(str: string): boolean;
export type URLPath = {
pathname: string;
search?: string;
hash?: string;
};
export declare function parseURLPath(urlPath: string, fromPath?: string): URLPath;
export declare function serializeURLPath(urlPath: URLPath): string;
/**
* Resolve pathnames and fail-fast if resolution fails. Uses standard URL
* semantics (provided by `resolve-pathname` which is used internally by React
* router)
*/
export declare function resolvePathname(to: string, from?: string): string;
/** Appends a leading slash to `str`, if one doesn't exist. */
export declare function addLeadingSlash(str: string): string;
/** Appends a trailing slash to `str`, if one doesn't exist. */
export declare function addTrailingSlash(str: string): string;
/** Removes the trailing slash from `str`. */
export declare function removeTrailingSlash(str: string): string;
/** Constructs an SSH URL that can be used to push to GitHub. */
export declare function buildSshUrl(githubHost: string, organizationName: string, projectName: string, githubPort?: string): string;
/** Constructs an HTTP URL that can be used to push to GitHub. */
export declare function buildHttpsUrl(gitCredentials: string, githubHost: string, organizationName: string, projectName: string, githubPort?: string): string;
/**
* Whether the current URL is an SSH protocol. In addition to looking for
* `ssh:`, it will also allow protocol-less URLs like
* `git@github.com:facebook/docusaurus.git`.
*/
export declare function hasSSHProtocol(sourceRepoUrl: string): boolean;
//# sourceMappingURL=urlUtils.d.ts.map

1
node_modules/@docusaurus/utils/lib/urlUtils.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"urlUtils.d.ts","sourceRoot":"","sources":["../src/urlUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAKH;;;;;;;;;;;GAWG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,MAAM,EAAE,GAAG,MAAM,CAoFtD;AAED;;;;GAIG;AACH,wBAAgB,UAAU,CACxB,gBAAgB,EAAE,MAAM,EACxB,OAAO,CAAC,EAAE,MAAM,GACf,MAAM,GAAG,SAAS,CAKpB;AAED;;;GAGG;AACH,wBAAgB,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAQ/C;AAED;;;;;;GAMG;AACH,wBAAgB,UAAU,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,CAKnD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAUpD;AAED,MAAM,MAAM,OAAO,GAAG;IAAC,QAAQ,EAAE,MAAM,CAAC;IAAC,MAAM,CAAC,EAAE,MAAM,CAAC;IAAC,IAAI,CAAC,EAAE,MAAM,CAAA;CAAC,CAAC;AAKzE,wBAAgB,YAAY,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,OAAO,CA2CxE;AAED,wBAAgB,gBAAgB,CAAC,OAAO,EAAE,OAAO,GAAG,MAAM,CAIzD;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,EAAE,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAIjE;AAED,8DAA8D;AAC9D,wBAAgB,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAEnD;AAGD,+DAA+D;AAC/D,wBAAgB,gBAAgB,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAEpD;AAED,6CAA6C;AAC7C,wBAAgB,mBAAmB,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAEvD;AAED,gEAAgE;AAChE,wBAAgB,WAAW,CACzB,UAAU,EAAE,MAAM,EAClB,gBAAgB,EAAE,MAAM,EACxB,WAAW,EAAE,MAAM,EACnB,UAAU,CAAC,EAAE,MAAM,GAClB,MAAM,CAKR;AAED,iEAAiE;AACjE,wBAAgB,aAAa,CAC3B,cAAc,EAAE,MAAM,EACtB,UAAU,EAAE,MAAM,EAClB,gBAAgB,EAAE,MAAM,EACxB,WAAW,EAAE,MAAM,EACnB,UAAU,CAAC,EAAE,MAAM,GAClB,MAAM,CAKR;AAED;;;;GAIG;AACH,wBAAgB,cAAc,CAAC,aAAa,EAAE,MAAM,GAAG,OAAO,CAU7D"}

256
node_modules/@docusaurus/utils/lib/urlUtils.js generated vendored Normal file
View File

@@ -0,0 +1,256 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.hasSSHProtocol = exports.buildHttpsUrl = exports.buildSshUrl = exports.removeTrailingSlash = exports.addTrailingSlash = exports.addLeadingSlash = exports.resolvePathname = exports.serializeURLPath = exports.parseURLPath = exports.isValidPathname = exports.encodePath = exports.fileToPath = exports.getEditUrl = exports.normalizeUrl = void 0;
const tslib_1 = require("tslib");
const resolve_pathname_1 = tslib_1.__importDefault(require("resolve-pathname"));
const jsUtils_1 = require("./jsUtils");
/**
* Much like `path.join`, but much better. Takes an array of URL segments, and
* joins them into a reasonable URL.
*
* - `["file:", "/home", "/user/", "website"]` => `file:///home/user/website`
* - `["file://", "home", "/user/", "website"]` => `file://home/user/website` (relative!)
* - Remove trailing slash before parameters or hash.
* - Replace `?` in query parameters with `&`.
* - Dedupe forward slashes in the entire path, avoiding protocol slashes.
*
* @throws {TypeError} If any of the URL segment is not a string, this throws.
*/
function normalizeUrl(rawUrls) {
const urls = [...rawUrls];
const resultArray = [];
let hasStartingSlash = false;
let hasEndingSlash = false;
const isNonEmptyArray = (arr) => arr.length > 0;
if (!isNonEmptyArray(urls)) {
return '';
}
// If the first part is a plain protocol, we combine it with the next part.
if (urls[0].match(/^[^/:]+:\/*$/) && urls.length > 1) {
const first = urls.shift();
if (first.startsWith('file:') && urls[0].startsWith('/')) {
// Force a double slash here, else we lose the information that the next
// segment is an absolute path
urls[0] = `${first}//${urls[0]}`;
}
else {
urls[0] = first + urls[0];
}
}
// There must be two or three slashes in the file protocol,
// two slashes in anything else.
const replacement = urls[0].match(/^file:\/\/\//) ? '$1:///' : '$1://';
urls[0] = urls[0].replace(/^(?<protocol>[^/:]+):\/*/, replacement);
for (let i = 0; i < urls.length; i += 1) {
let component = urls[i];
if (typeof component !== 'string') {
throw new TypeError(`Url must be a string. Received ${typeof component}`);
}
if (component === '') {
if (i === urls.length - 1 && hasEndingSlash) {
resultArray.push('/');
}
continue;
}
if (component !== '/') {
if (i > 0) {
// Removing the starting slashes for each component but the first.
component = component.replace(/^\/+/,
// Special case where the first element of rawUrls is empty
// ["", "/hello"] => /hello
component.startsWith('/') && !hasStartingSlash ? '/' : '');
}
hasEndingSlash = component.endsWith('/');
// Removing the ending slashes for each component but the last. For the
// last component we will combine multiple slashes to a single one.
component = component.replace(/\/+$/, i < urls.length - 1 ? '' : '/');
}
hasStartingSlash = true;
resultArray.push(component);
}
let str = resultArray.join('/');
// Each input component is now separated by a single slash except the possible
// first plain protocol part.
// Remove trailing slash before parameters or hash.
str = str.replace(/\/(?<search>\?|&|#[^!])/g, '$1');
// Replace ? in parameters with &.
const parts = str.split('?');
str = parts.shift() + (parts.length > 0 ? '?' : '') + parts.join('&');
// Dedupe forward slashes in the entire path, avoiding protocol slashes.
str = str.replace(/(?<textBefore>[^:/]\/)\/+/g, '$1');
// Dedupe forward slashes at the beginning of the path.
str = str.replace(/^\/+/g, '/');
return str;
}
exports.normalizeUrl = normalizeUrl;
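// Illustrative usage sketches (the URLs are hypothetical):
normalizeUrl(['https://example.com/', '/docs/', '/intro/']); // => 'https://example.com/docs/intro/'
normalizeUrl(['/base/', 'blog', 'tags/']); // => '/base/blog/tags/'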
/**
* Takes a file's path, relative to its content folder, and computes its edit
* URL. If `editUrl` is `undefined`, this returns `undefined`, as is the case
* when the user doesn't want an edit URL in her config.
*/
function getEditUrl(fileRelativePath, editUrl) {
return editUrl
? // Don't use posixPath for this: we need to force a forward slash path
normalizeUrl([editUrl, fileRelativePath.replace(/\\/g, '/')])
: undefined;
}
exports.getEditUrl = getEditUrl;
/**
* Converts file path to a reasonable URL path, e.g. `'index.md'` -> `'/'`,
* `'foo/bar.js'` -> `'/foo/bar'`
*/
function fileToPath(file) {
const indexRE = /(?<dirname>^|.*\/)index\.(?:mdx?|jsx?|tsx?)$/i;
const extRE = /\.(?:mdx?|jsx?|tsx?)$/;
if (indexRE.test(file)) {
return file.replace(indexRE, '/$1');
}
return `/${file.replace(extRE, '').replace(/\\/g, '/')}`;
}
exports.fileToPath = fileToPath;
/**
* Similar to `encodeURI`, but uses `encodeURIComponent` and assumes there's no
* query.
*
* `encodeURI("/question?/answer#section")` => `"/question?/answer#section"`;
* `encodePath("/question?/answer#section")` => `"/question%3F/answer%23section"`
*/
function encodePath(userPath) {
return userPath
.split('/')
.map((item) => encodeURIComponent(item))
.join('/');
}
exports.encodePath = encodePath;
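// Illustrative usage sketches (the file paths are hypothetical):
fileToPath('docs/index.md'); // => '/docs/'
fileToPath('docs/intro.md'); // => '/docs/intro'
encodePath('/question?/answer#section'); // => '/question%3F/answer%23section'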
/**
* Whether `str` is a valid pathname. It must be absolute, and not contain
* special characters.
*/
function isValidPathname(str) {
if (!str.startsWith('/')) {
return false;
}
try {
const parsedPathname = new URL(str, 'https://domain.com').pathname;
return parsedPathname === str || parsedPathname === encodeURI(str);
}
catch {
return false;
}
}
exports.isValidPathname = isValidPathname;
// Let's name the concept of (pathname + search + hash) as URLPath
// See also https://twitter.com/kettanaito/status/1741768992866308120
// Note: this function also resolves relative pathnames while parsing!
function parseURLPath(urlPath, fromPath) {
function parseURL(url, base) {
try {
// A possible alternative? https://github.com/unjs/ufo#url
return new URL(url, base ?? 'https://example.com');
}
catch (e) {
throw new Error(`Can't parse URL ${url}${base ? ` with base ${base}` : ''}`, { cause: e });
}
}
const base = fromPath ? parseURL(fromPath) : undefined;
const url = parseURL(urlPath, base);
const { pathname } = url;
// Fixes annoying url.search behavior
// "" => undefined
// "?" => ""
// "?param => "param"
const search = url.search
? url.search.slice(1)
: urlPath.includes('?')
? ''
: undefined;
// Fixes annoying url.hash behavior
// "" => undefined
// "#" => ""
// "?param => "param"
const hash = url.hash
? url.hash.slice(1)
: urlPath.includes('#')
? ''
: undefined;
return {
pathname,
search,
hash,
};
}
exports.parseURLPath = parseURLPath;
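// Illustrative usage sketches (the URL paths are hypothetical):
parseURLPath('/docs/intro?plan=free#setup');
// => {pathname: '/docs/intro', search: 'plan=free', hash: 'setup'}
parseURLPath('./sibling', '/docs/intro');
// relative pathnames are resolved against fromPath; pathname => '/docs/sibling'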
function serializeURLPath(urlPath) {
const search = urlPath.search === undefined ? '' : `?${urlPath.search}`;
const hash = urlPath.hash === undefined ? '' : `#${urlPath.hash}`;
return `${urlPath.pathname}${search}${hash}`;
}
exports.serializeURLPath = serializeURLPath;
/**
* Resolve pathnames and fail-fast if resolution fails. Uses standard URL
* semantics (provided by `resolve-pathname` which is used internally by React
* router)
*/
function resolvePathname(to, from) {
// TODO do we really need resolve-pathname lib anymore?
// possible alternative: decodeURI(parseURLPath(to, from).pathname);
return (0, resolve_pathname_1.default)(to, from);
}
exports.resolvePathname = resolvePathname;
/** Appends a leading slash to `str`, if one doesn't exist. */
function addLeadingSlash(str) {
return (0, jsUtils_1.addPrefix)(str, '/');
}
exports.addLeadingSlash = addLeadingSlash;
// TODO deduplicate: also present in @docusaurus/utils-common
/** Appends a trailing slash to `str`, if one doesn't exist. */
function addTrailingSlash(str) {
return (0, jsUtils_1.addSuffix)(str, '/');
}
exports.addTrailingSlash = addTrailingSlash;
/** Removes the trailing slash from `str`. */
function removeTrailingSlash(str) {
return (0, jsUtils_1.removeSuffix)(str, '/');
}
exports.removeTrailingSlash = removeTrailingSlash;
/** Constructs an SSH URL that can be used to push to GitHub. */
function buildSshUrl(githubHost, organizationName, projectName, githubPort) {
if (githubPort) {
return `ssh://git@${githubHost}:${githubPort}/${organizationName}/${projectName}.git`;
}
return `git@${githubHost}:${organizationName}/${projectName}.git`;
}
exports.buildSshUrl = buildSshUrl;
/** Constructs an HTTP URL that can be used to push to GitHub. */
function buildHttpsUrl(gitCredentials, githubHost, organizationName, projectName, githubPort) {
if (githubPort) {
return `https://${gitCredentials}@${githubHost}:${githubPort}/${organizationName}/${projectName}.git`;
}
return `https://${gitCredentials}@${githubHost}/${organizationName}/${projectName}.git`;
}
exports.buildHttpsUrl = buildHttpsUrl;
/**
* Whether the current URL is an SSH protocol. In addition to looking for
* `ssh:`, it will also allow protocol-less URLs like
* `git@github.com:facebook/docusaurus.git`.
*/
function hasSSHProtocol(sourceRepoUrl) {
try {
if (new URL(sourceRepoUrl).protocol === 'ssh:') {
return true;
}
return false;
}
catch {
// Fails when there isn't a protocol
return /^(?:[\w-]+@)?[\w.-]+:[\w./-]+/.test(sourceRepoUrl);
}
}
exports.hasSSHProtocol = hasSSHProtocol;
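// Illustrative usage sketches (host/org/project/credential values are hypothetical):
buildSshUrl('github.com', 'facebook', 'docusaurus'); // => 'git@github.com:facebook/docusaurus.git'
buildHttpsUrl('user:token', 'github.com', 'facebook', 'docusaurus'); // => 'https://user:token@github.com/facebook/docusaurus.git'
hasSSHProtocol('git@github.com:facebook/docusaurus.git'); // => true (protocol-less SSH form)
hasSSHProtocol('https://github.com/facebook/docusaurus.git'); // => false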
//# sourceMappingURL=urlUtils.js.map

1
node_modules/@docusaurus/utils/lib/urlUtils.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

38
node_modules/@docusaurus/utils/lib/webpackUtils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,38 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import type { RuleSetRule, LoaderContext } from 'webpack';
export type WebpackCompilerName = 'server' | 'client';
export declare function getWebpackLoaderCompilerName(context: LoaderContext<unknown>): WebpackCompilerName;
type AssetFolder = 'images' | 'files' | 'fonts' | 'medias';
type FileLoaderUtils = {
loaders: {
file: (options: {
folder: AssetFolder;
}) => RuleSetRule;
url: (options: {
folder: AssetFolder;
}) => RuleSetRule;
inlineMarkdownImageFileLoader: string;
inlineMarkdownAssetImageFileLoader: string;
inlineMarkdownLinkFileLoader: string;
};
rules: {
images: () => RuleSetRule;
fonts: () => RuleSetRule;
media: () => RuleSetRule;
svg: () => RuleSetRule;
otherAssets: () => RuleSetRule;
};
};
/**
* Returns unified loader configurations to be used for various file types.
*
* Inspired by https://github.com/gatsbyjs/gatsby/blob/8e6e021014da310b9cc7d02e58c9b3efe938c665/packages/gatsby/src/utils/webpack-utils.ts#L447
*/
export declare function getFileLoaderUtils(): FileLoaderUtils;
export {};
//# sourceMappingURL=webpackUtils.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"webpackUtils.d.ts","sourceRoot":"","sources":["../src/webpackUtils.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAQH,OAAO,KAAK,EAAC,WAAW,EAAE,aAAa,EAAC,MAAM,SAAS,CAAC;AAExD,MAAM,MAAM,mBAAmB,GAAG,QAAQ,GAAG,QAAQ,CAAC;AAEtD,wBAAgB,4BAA4B,CAC1C,OAAO,EAAE,aAAa,CAAC,OAAO,CAAC,GAC9B,mBAAmB,CAYrB;AAED,KAAK,WAAW,GAAG,QAAQ,GAAG,OAAO,GAAG,OAAO,GAAG,QAAQ,CAAC;AAE3D,KAAK,eAAe,GAAG;IACrB,OAAO,EAAE;QACP,IAAI,EAAE,CAAC,OAAO,EAAE;YAAC,MAAM,EAAE,WAAW,CAAA;SAAC,KAAK,WAAW,CAAC;QACtD,GAAG,EAAE,CAAC,OAAO,EAAE;YAAC,MAAM,EAAE,WAAW,CAAA;SAAC,KAAK,WAAW,CAAC;QACrD,6BAA6B,EAAE,MAAM,CAAC;QACtC,kCAAkC,EAAE,MAAM,CAAC;QAC3C,4BAA4B,EAAE,MAAM,CAAC;KACtC,CAAC;IACF,KAAK,EAAE;QACL,MAAM,EAAE,MAAM,WAAW,CAAC;QAC1B,KAAK,EAAE,MAAM,WAAW,CAAC;QACzB,KAAK,EAAE,MAAM,WAAW,CAAC;QACzB,GAAG,EAAE,MAAM,WAAW,CAAC;QACvB,WAAW,EAAE,MAAM,WAAW,CAAC;KAChC,CAAC;CACH,CAAC;AAEF;;;;GAIG;AACH,wBAAgB,kBAAkB,IAAI,eAAe,CAqHpD"}

128
node_modules/@docusaurus/utils/lib/webpackUtils.js generated vendored Normal file
View File

@@ -0,0 +1,128 @@
"use strict";
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.getFileLoaderUtils = exports.getWebpackLoaderCompilerName = void 0;
const tslib_1 = require("tslib");
const path_1 = tslib_1.__importDefault(require("path"));
const pathUtils_1 = require("./pathUtils");
const constants_1 = require("./constants");
function getWebpackLoaderCompilerName(context) {
// eslint-disable-next-line no-underscore-dangle
const compilerName = context._compiler?.name;
switch (compilerName) {
case 'server':
case 'client':
return compilerName;
default:
throw new Error(`Cannot get valid Docusaurus webpack compiler name. Found compilerName=${compilerName}`);
}
}
exports.getWebpackLoaderCompilerName = getWebpackLoaderCompilerName;
/**
* Returns unified loader configurations to be used for various file types.
*
* Inspired by https://github.com/gatsbyjs/gatsby/blob/8e6e021014da310b9cc7d02e58c9b3efe938c665/packages/gatsby/src/utils/webpack-utils.ts#L447
*/
function getFileLoaderUtils() {
// Files/images < urlLoaderLimit will be inlined as base64 strings directly in
// the html
const urlLoaderLimit = constants_1.WEBPACK_URL_LOADER_LIMIT;
const fileLoaderFileName = (folder) => path_1.default.posix.join(constants_1.OUTPUT_STATIC_ASSETS_DIR_NAME, folder, '[name]-[contenthash].[ext]');
const loaders = {
file: (options) => ({
loader: require.resolve(`file-loader`),
options: {
name: fileLoaderFileName(options.folder),
},
}),
url: (options) => ({
loader: require.resolve('url-loader'),
options: {
limit: urlLoaderLimit,
name: fileLoaderFileName(options.folder),
fallback: require.resolve('file-loader'),
},
}),
// TODO avoid conflicts with the ideal-image plugin
// TODO this may require a little breaking change for ideal-image users?
// Maybe with the ideal image plugin, all md images should be "ideal"?
// This is used to force url-loader+file-loader on markdown images
// https://webpack.js.org/concepts/loaders/#inline
inlineMarkdownImageFileLoader: `!${(0, pathUtils_1.escapePath)(require.resolve('url-loader'))}?limit=${urlLoaderLimit}&name=${fileLoaderFileName('images')}&fallback=${(0, pathUtils_1.escapePath)(require.resolve('file-loader'))}!`,
inlineMarkdownAssetImageFileLoader: `!${(0, pathUtils_1.escapePath)(require.resolve('file-loader'))}?name=${fileLoaderFileName('images')}!`,
inlineMarkdownLinkFileLoader: `!${(0, pathUtils_1.escapePath)(require.resolve('file-loader'))}?name=${fileLoaderFileName('files')}!`,
};
const rules = {
/**
* Loads image assets, inlines images via a data URI if they are below
* the size threshold
*/
images: () => ({
use: [loaders.url({ folder: 'images' })],
test: /\.(?:ico|jpe?g|png|gif|webp|avif)(?:\?.*)?$/i,
}),
fonts: () => ({
use: [loaders.url({ folder: 'fonts' })],
test: /\.(?:woff2?|eot|ttf|otf)$/i,
}),
/**
* Loads audio and video and inlines them via a data URI if they are below
* the size threshold
*/
media: () => ({
use: [loaders.url({ folder: 'medias' })],
test: /\.(?:mp4|avi|mov|mkv|mpg|mpeg|vob|wmv|m4v|webm|ogv|wav|mp3|m4a|aac|oga|flac)$/i,
}),
svg: () => ({
test: /\.svg$/i,
oneOf: [
{
use: [
{
loader: require.resolve('@svgr/webpack'),
options: {
prettier: false,
svgo: true,
svgoConfig: {
plugins: [
{
name: 'preset-default',
params: {
overrides: {
removeTitle: false,
removeViewBox: false,
},
},
},
],
},
titleProp: true,
ref: ![path_1.default],
},
},
],
// We don't want to use SVGR loader for non-React source code
// ie we don't want to use SVGR for CSS files...
issuer: {
and: [/\.(?:tsx?|jsx?|mdx?)$/i],
},
},
{
use: [loaders.url({ folder: 'images' })],
},
],
}),
otherAssets: () => ({
use: [loaders.file({ folder: 'files' })],
test: /\.(?:pdf|docx?|xlsx?|zip|rar)$/i,
}),
};
return { loaders, rules };
}
exports.getFileLoaderUtils = getFileLoaderUtils;
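// Illustrative usage sketch: a consumer (e.g. a plugin's webpack configuration)
// might wire the returned rules into a plain webpack `module.rules` array like
// this (the surrounding config shape is hypothetical):
function exampleAssetRules() {
  const {rules} = getFileLoaderUtils();
  return {
    module: {
      rules: [rules.images(), rules.fonts(), rules.media(), rules.svg(), rules.otherAssets()],
    },
  };
}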
//# sourceMappingURL=webpackUtils.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"webpackUtils.js","sourceRoot":"","sources":["../src/webpackUtils.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;AAEH,wDAAwB;AACxB,2CAAuC;AACvC,2CAGqB;AAKrB,SAAgB,4BAA4B,CAC1C,OAA+B;IAE/B,gDAAgD;IAChD,MAAM,YAAY,GAAG,OAAO,CAAC,SAAS,EAAE,IAAI,CAAC;IAC7C,QAAQ,YAAY,EAAE;QACpB,KAAK,QAAQ,CAAC;QACd,KAAK,QAAQ;YACX,OAAO,YAAY,CAAC;QACtB;YACE,MAAM,IAAI,KAAK,CACb,yEAAyE,YAAY,EAAE,CACxF,CAAC;KACL;AACH,CAAC;AAdD,oEAcC;AAqBD;;;;GAIG;AACH,SAAgB,kBAAkB;IAChC,8EAA8E;IAC9E,WAAW;IACX,MAAM,cAAc,GAAG,oCAAwB,CAAC;IAEhD,MAAM,kBAAkB,GAAG,CAAC,MAAmB,EAAE,EAAE,CACjD,cAAI,CAAC,KAAK,CAAC,IAAI,CACb,yCAA6B,EAC7B,MAAM,EACN,4BAA4B,CAC7B,CAAC;IAEJ,MAAM,OAAO,GAA+B;QAC1C,IAAI,EAAE,CAAC,OAA8B,EAAE,EAAE,CAAC,CAAC;YACzC,MAAM,EAAE,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC;YACtC,OAAO,EAAE;gBACP,IAAI,EAAE,kBAAkB,CAAC,OAAO,CAAC,MAAM,CAAC;aACzC;SACF,CAAC;QACF,GAAG,EAAE,CAAC,OAA8B,EAAE,EAAE,CAAC,CAAC;YACxC,MAAM,EAAE,OAAO,CAAC,OAAO,CAAC,YAAY,CAAC;YACrC,OAAO,EAAE;gBACP,KAAK,EAAE,cAAc;gBACrB,IAAI,EAAE,kBAAkB,CAAC,OAAO,CAAC,MAAM,CAAC;gBACxC,QAAQ,EAAE,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC;aACzC;SACF,CAAC;QAEF,mDAAmD;QACnD,wEAAwE;QACxE,sEAAsE;QACtE,kEAAkE;QAClE,kDAAkD;QAClD,6BAA6B,EAAE,IAAI,IAAA,sBAAU,EAC3C,OAAO,CAAC,OAAO,CAAC,YAAY,CAAC,CAC9B,UAAU,cAAc,SAAS,kBAAkB,CAClD,QAAQ,CACT,aAAa,IAAA,sBAAU,EAAC,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,GAAG;QAC3D,kCAAkC,EAAE,IAAI,IAAA,sBAAU,EAChD,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC,CAC/B,SAAS,kBAAkB,CAAC,QAAQ,CAAC,GAAG;QACzC,4BAA4B,EAAE,IAAI,IAAA,sBAAU,EAC1C,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC,CAC/B,SAAS,kBAAkB,CAAC,OAAO,CAAC,GAAG;KACzC,CAAC;IAEF,MAAM,KAAK,GAA6B;QACtC;;;WAGG;QACH,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC;YACb,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,EAAC,MAAM,EAAE,QAAQ,EAAC,CAAC,CAAC;YACtC,IAAI,EAAE,8CAA8C;SACrD,CAAC;QAEF,KAAK,EAAE,GAAG,EAAE,CAAC,CAAC;YACZ,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,EAAC,MAAM,EAAE,OAAO,EAAC,CAAC,CAAC;YACrC,IAAI,EAAE,4BAA4B;SACnC,CAAC;QAEF;;;WAGG;QACH,KAAK,EAAE,GAAG,EAAE,CAAC,CAAC;YACZ,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,EAAC,MAAM,EAAE,QAAQ,EAAC,CAAC,CAAC;YACtC,IAAI,EAAE,gFAAgF;SACvF,CAAC;QAEF,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC;YACV,IAAI,EAAE,SAAS;YACf,KAAK,EAAE;gBACL;oBACE,GAAG,EAAE;wBACH;4BACE,MAAM,EAAE,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC;4BACxC,OAAO,EAAE;gCACP,QAAQ,EAAE,KAAK;gCACf,IAAI,EAAE,IAAI;gCACV,UAAU,EAAE;oCACV,OAAO,EAAE;wCACP;4CACE,IAAI,EAAE,gBAAgB;4CACtB,MAAM,EAAE;gDACN,SAAS,EAAE;oDACT,WAAW,EAAE,KAAK;oDAClB,aAAa,EAAE,KAAK;iDACrB;6CACF;yCACF;qCACF;iCACF;gCACD,SAAS,EAAE,IAAI;gCACf,GAAG,EAAE,CAAC,CAAC,cAAI,CAAC;6BACb;yBACF;qBACF;oBACD,6DAA6D;oBAC7D,gDAAgD;oBAChD,MAAM,EAAE;wBACN,GAAG,EAAE,CAAC,wBAAwB,CAAC;qBAChC;iBACF;gBACD;oBACE,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,EAAC,MAAM,EAAE,QAAQ,EAAC,CAAC,CAAC;iBACvC;aACF;SACF,CAAC;QAEF,WAAW,EAAE,GAAG,EAAE,CAAC,CAAC;YAClB,GAAG,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,EAAC,MAAM,EAAE,OAAO,EAAC,CAAC,CAAC;YACtC,IAAI,EAAE,iCAAiC;SACxC,CAAC;KACH,CAAC;IAEF,OAAO,EAAC,OAAO,EAAE,KAAK,EAAC,CAAC;AAC1B,CAAC;AArHD,gDAqHC"}

60
node_modules/@docusaurus/utils/package.json generated vendored Normal file
View File

@@ -0,0 +1,60 @@
{
"name": "@docusaurus/utils",
"version": "3.1.1",
"description": "Node utility functions for Docusaurus packages.",
"main": "./lib/index.js",
"types": "./lib/index.d.ts",
"scripts": {
"build": "tsc",
"watch": "tsc --watch"
},
"publishConfig": {
"access": "public"
},
"repository": {
"type": "git",
"url": "https://github.com/facebook/docusaurus.git",
"directory": "packages/docusaurus-utils"
},
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.1.1",
"@svgr/webpack": "^6.5.1",
"escape-string-regexp": "^4.0.0",
"file-loader": "^6.2.0",
"fs-extra": "^11.1.1",
"github-slugger": "^1.5.0",
"globby": "^11.1.0",
"gray-matter": "^4.0.3",
"jiti": "^1.20.0",
"js-yaml": "^4.1.0",
"lodash": "^4.17.21",
"micromatch": "^4.0.5",
"resolve-pathname": "^3.0.0",
"shelljs": "^0.8.5",
"tslib": "^2.6.0",
"url-loader": "^4.1.1",
"webpack": "^5.88.1"
},
"engines": {
"node": ">=18.0"
},
"devDependencies": {
"@docusaurus/types": "3.1.1",
"@types/dedent": "^0.7.0",
"@types/github-slugger": "^1.3.0",
"@types/micromatch": "^4.0.2",
"@types/react-dom": "^18.2.7",
"dedent": "^0.7.0",
"tmp-promise": "^3.0.3"
},
"peerDependencies": {
"@docusaurus/types": "*"
},
"peerDependenciesMeta": {
"@docusaurus/types": {
"optional": true
}
},
"gitHead": "8017f6a6776ba1bd7065e630a52fe2c2654e2f1b"
}

100
node_modules/@docusaurus/utils/src/constants.ts generated vendored Normal file
View File

@@ -0,0 +1,100 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/** Node major version, directly read from env. */
export const NODE_MAJOR_VERSION = parseInt(
process.versions.node.split('.')[0]!,
10,
);
/** Node minor version, directly read from env. */
export const NODE_MINOR_VERSION = parseInt(
process.versions.node.split('.')[1]!,
10,
);
/** Docusaurus core version. */
export const DOCUSAURUS_VERSION =
// eslint-disable-next-line global-require, @typescript-eslint/no-var-requires
(require('../package.json') as {version: string}).version;
/**
* Can be overridden with cli option `--out-dir`. Code should generally use
* `context.outDir` instead (which is always absolute and localized).
*/
export const DEFAULT_BUILD_DIR_NAME = 'build';
/**
* Can be overridden with cli option `--config`. Code should generally use
* `context.siteConfigPath` instead (which is always absolute).
*
* This does not have extensions, so that we can substitute different ones
* when resolving the path.
*/
export const DEFAULT_CONFIG_FILE_NAME = 'docusaurus.config';
/** Can be absolute or relative to site directory. */
export const BABEL_CONFIG_FILE_NAME =
process.env.DOCUSAURUS_BABEL_CONFIG_FILE_NAME ?? 'babel.config.js';
/**
* Can be absolute or relative to site directory. Code should generally use
* `context.generatedFilesDir` instead (which is always absolute).
*/
export const GENERATED_FILES_DIR_NAME =
process.env.DOCUSAURUS_GENERATED_FILES_DIR_NAME ?? '.docusaurus';
/**
* We would assume all of the site's JS code lives in here and not outside.
* Relative to the site directory.
*/
export const SRC_DIR_NAME = 'src';
/**
* Can be overridden with `config.staticDirectories`. Code should use
* `context.siteConfig.staticDirectories` instead (which is always absolute).
*/
export const DEFAULT_STATIC_DIR_NAME = 'static';
/**
* Files here are handled by webpack, hashed (can be cached aggressively).
* Relative to the build output folder.
*/
export const OUTPUT_STATIC_ASSETS_DIR_NAME = 'assets';
/**
* Components in this directory will receive the `@theme` alias and be able to
* shadow default theme components.
*/
export const THEME_PATH = `${SRC_DIR_NAME}/theme`;
/**
* All translation-related data live here, relative to site directory. Content
* will be namespaced by locale.
*/
export const DEFAULT_I18N_DIR_NAME = 'i18n';
/**
* Translations for React code.
*/
export const CODE_TRANSLATIONS_FILE_NAME = 'code.json';
/** Dev server opens on this port by default. */
export const DEFAULT_PORT = process.env.PORT
? parseInt(process.env.PORT, 10)
: 3000;
/** Default plugin ID. */
export const DEFAULT_PLUGIN_ID = 'default';
/**
* Allow overriding the limit after which the url loader will no longer inline
* assets.
*
* @see https://github.com/facebook/docusaurus/issues/5493
*/
export const WEBPACK_URL_LOADER_LIMIT =
process.env.WEBPACK_URL_LOADER_LIMIT ?? 10000;

54
node_modules/@docusaurus/utils/src/contentVisibilityUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,54 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
type Env = 'production' | 'development';
/**
* To easily work on draft/unlisted in dev mode, use this env variable!
* SIMULATE_PRODUCTION_VISIBILITY=true yarn start:website
*/
const simulateProductionVisibility =
process.env.SIMULATE_PRODUCTION_VISIBILITY === 'true';
/**
* draft/unlisted is a production-only concept
* In dev it is ignored and all content files are included
*/
function isProduction(env: Env | undefined): boolean {
return (
simulateProductionVisibility ||
(env ?? process.env.NODE_ENV) === 'production'
);
}
/**
* Draft content will not be included in the production build.
*/
export function isDraft({
frontMatter,
env,
}: {
frontMatter: {draft?: boolean};
env?: Env;
}): boolean {
return (isProduction(env) && frontMatter.draft) ?? false;
}
/**
* Unlisted content will be included in the production build, but hidden.
* It is excluded from the sitemap, gets noIndex, and does not appear in lists, etc.
* Only users who have the link can find it.
*/
export function isUnlisted({
frontMatter,
env,
}: {
frontMatter: {unlisted?: boolean};
env?: Env;
}): boolean {
return (isProduction(env) && frontMatter.unlisted) ?? false;
}
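
For context, here is a minimal usage sketch of `isDraft`/`isUnlisted` as a content plugin might consume them (not part of this package; the `Doc` shape is an assumption):

```ts
import {isDraft, isUnlisted} from '@docusaurus/utils';

// Hypothetical document shape, only for this sketch.
type Doc = {id: string; frontMatter: {draft?: boolean; unlisted?: boolean}};

function splitByVisibility(docs: Doc[]) {
  // Drafts are dropped entirely from production builds.
  const published = docs.filter(
    (doc) => !isDraft({frontMatter: doc.frontMatter}),
  );
  // Unlisted docs are still built, but hidden from listings/sitemap.
  const listed = published.filter(
    (doc) => !isUnlisted({frontMatter: doc.frontMatter}),
  );
  return {published, listed};
}
```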

122
node_modules/@docusaurus/utils/src/dataFileUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,122 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import fs from 'fs-extra';
import path from 'path';
import logger from '@docusaurus/logger';
import Yaml from 'js-yaml';
import {findAsyncSequential} from './index';
import type {ContentPaths} from './markdownLinks';
type DataFileParams = {
/** Path to the potential data file, relative to `contentPaths` */
filePath: string;
/**
* Includes the base path and localized path, both of which are eligible for
* sourcing data files. Both paths should be absolute.
*/
contentPaths: ContentPaths;
};
/**
* Looks for a data file in the potential content paths; loads a localized data
* file in priority.
*
* @returns An absolute path to the data file, or `undefined` if there isn't one.
*/
export async function getDataFilePath({
filePath,
contentPaths,
}: DataFileParams): Promise<string | undefined> {
const contentPath = await findFolderContainingFile(
getContentPathList(contentPaths),
filePath,
);
if (contentPath) {
return path.resolve(contentPath, filePath);
}
return undefined;
}
/**
* Looks for a data file in the content paths, and returns the object validated
* and normalized according to the `validate` callback.
*
* @returns `undefined` when file not found
* @throws Throws when validation fails, displaying a helpful context message.
*/
export async function getDataFileData<T>(
params: DataFileParams & {
/** Used for the "The X file looks invalid" message. */
fileType: string;
},
validate: (content: unknown) => T,
): Promise<T | undefined> {
const filePath = await getDataFilePath(params);
if (!filePath) {
return undefined;
}
try {
const contentString = await fs.readFile(filePath, {encoding: 'utf8'});
const unsafeContent = Yaml.load(contentString);
return validate(unsafeContent);
} catch (err) {
logger.error`The ${params.fileType} file at path=${filePath} looks invalid.`;
throw err;
}
}
/**
* Takes the `contentPaths` data structure and returns an ordered path list
* indicating their priorities. For all data, we look in the localized folder
* in priority.
*/
export function getContentPathList(contentPaths: ContentPaths): string[] {
return [contentPaths.contentPathLocalized, contentPaths.contentPath];
}
/**
* @param folderPaths a list of absolute paths.
* @param relativeFilePath file path relative to each `folderPaths`.
* @returns the first folder path in which the file exists, or `undefined` if
* none is found.
*/
export async function findFolderContainingFile(
folderPaths: string[],
relativeFilePath: string,
): Promise<string | undefined> {
return findAsyncSequential(folderPaths, (folderPath) =>
fs.pathExists(path.join(folderPath, relativeFilePath)),
);
}
/**
* Fail-fast alternative to `findFolderContainingFile`.
*
* @param folderPaths a list of absolute paths.
* @param relativeFilePath file path relative to each `folderPaths`.
* @returns the first folder path in which the file exists.
* @throws Throws if no file can be found. You should use this method only when
* you actually know the file exists (e.g. when the `relativeFilePath` is read
* with a glob and you are just trying to localize it)
*/
export async function getFolderContainingFile(
folderPaths: string[],
relativeFilePath: string,
): Promise<string> {
const maybeFolderPath = await findFolderContainingFile(
folderPaths,
relativeFilePath,
);
if (!maybeFolderPath) {
throw new Error(
`File "${relativeFilePath}" does not exist in any of these folders:
- ${folderPaths.join('\n- ')}`,
);
}
return maybeFolderPath;
}
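
A hedged usage sketch of `getDataFileData`: loading an optional YAML data file with a validation callback. The `authors.yml` file name and the `Author` shape are illustrative assumptions, and a real caller would validate with a schema library instead of a cast:

```ts
import {getDataFileData, type ContentPaths} from '@docusaurus/utils';

type Author = {name: string; url?: string};

async function loadAuthors(contentPaths: ContentPaths): Promise<Author[]> {
  const authors = await getDataFileData(
    {filePath: 'authors.yml', contentPaths, fileType: 'authors'},
    // The validate callback receives the parsed YAML as `unknown`;
    // the cast keeps this sketch short.
    (content) => content as Author[],
  );
  // `undefined` means no data file was found in either content path.
  return authors ?? [];
}
```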

10
node_modules/@docusaurus/utils/src/deps.d.ts generated vendored Normal file
View File

@@ -0,0 +1,10 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
declare module 'resolve-pathname' {
export default function resolvePathname(to: string, from?: string): string;
}

101
node_modules/@docusaurus/utils/src/emitUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,101 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import path from 'path';
import fs from 'fs-extra';
import {createHash} from 'crypto';
import {findAsyncSequential} from './jsUtils';
const fileHash = new Map<string, string>();
/**
* Outputs a file to the generated files directory. Only writes files if content
* differs from cache (for hot reload performance).
*
* @param generatedFilesDir Absolute path.
* @param file Path relative to `generatedFilesDir`. File will always be
* outputted; no need to ensure directory exists.
* @param content String content to write.
* @param skipCache If `true` (defaults to `true` in production), the file is
* force-rewritten, skipping the cache.
*/
export async function generate(
generatedFilesDir: string,
file: string,
content: string,
skipCache: boolean = process.env.NODE_ENV === 'production',
): Promise<void> {
const filepath = path.resolve(generatedFilesDir, file);
if (skipCache) {
await fs.outputFile(filepath, content);
// The cache still needs to be reset; otherwise, writing "A", "B", and "A" where
// "B" skips the cache would leave the last "A" unable to overwrite "B", because
// the first "A" remains in the cache. But if the file never existed in the
// cache, there is no need to register it.
if (fileHash.get(filepath)) {
fileHash.set(filepath, createHash('md5').update(content).digest('hex'));
}
return;
}
let lastHash = fileHash.get(filepath);
// If the file already exists but is not in the runtime cache yet, we
// calculate its content hash and compare. This avoids unnecessary
// overwriting so the old file can be reused.
if (!lastHash && (await fs.pathExists(filepath))) {
const lastContent = await fs.readFile(filepath, 'utf8');
lastHash = createHash('md5').update(lastContent).digest('hex');
fileHash.set(filepath, lastHash);
}
const currentHash = createHash('md5').update(content).digest('hex');
if (lastHash !== currentHash) {
await fs.outputFile(filepath, content);
fileHash.set(filepath, currentHash);
}
}
/**
* @param permalink The URL that the HTML file corresponds to, without base URL
* @param outDir Full path to the output directory
* @param trailingSlash The site config option. If provided, only one path will
* be read.
* @returns A buffer, which you have to decode to a string yourself if needed.
* (Not always necessary, since the output isn't for human consumption anyway,
* and most HTML manipulation libs accept buffers.)
* @throws Throws when the HTML file is not found at any of the potential paths.
* This should never happen as it would lead to a 404.
*/
export async function readOutputHTMLFile(
permalink: string,
outDir: string,
trailingSlash: boolean | undefined,
): Promise<Buffer> {
const withTrailingSlashPath = path.join(outDir, permalink, 'index.html');
const withoutTrailingSlashPath = (() => {
const basePath = path.join(outDir, permalink.replace(/\/$/, ''));
const htmlSuffix = /\.html?$/i.test(basePath) ? '' : '.html';
return `${basePath}${htmlSuffix}`;
})();
const possibleHtmlPaths = [
trailingSlash !== false && withTrailingSlashPath,
trailingSlash !== true && withoutTrailingSlashPath,
].filter((p): p is string => Boolean(p));
const HTMLPath = await findAsyncSequential(possibleHtmlPaths, fs.pathExists);
if (!HTMLPath) {
throw new Error(
`Expected output HTML file to be found at ${withTrailingSlashPath} for permalink ${permalink}.`,
);
}
return fs.readFile(HTMLPath);
}
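
As a sketch of how `generate` is meant to be called (the directory and file name below are assumptions, not real Docusaurus paths): content is only written when its hash differs from the cached one, which keeps hot reloads cheap.

```ts
import {generate} from '@docusaurus/utils';

async function writeRoutesModule(generatedFilesDir: string, routes: string[]) {
  const content = `export default ${JSON.stringify(routes, null, 2)};\n`;
  // No-op if the MD5 of `content` matches the previously written file
  // (unless NODE_ENV is production, where the cache is skipped by default).
  await generate(generatedFilesDir, 'routes-sketch.js', content);
}
```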

146
node_modules/@docusaurus/utils/src/gitUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,146 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import path from 'path';
import shell from 'shelljs';
/** Custom error thrown when git is not found in `PATH`. */
export class GitNotFoundError extends Error {}
/** Custom error thrown when the current file is not tracked by git. */
export class FileNotTrackedError extends Error {}
/**
* Fetches the git history of a file and returns a relevant commit date.
* It gets the commit date instead of author date so that amended commits
* can have their dates updated.
*
* @throws {@link GitNotFoundError} If git is not found in `PATH`.
* @throws {@link FileNotTrackedError} If the current file is not tracked by git.
* @throws Also throws when `git log` exited with non-zero, or when it outputs
* unexpected text.
*/
export function getFileCommitDate(
/** Absolute path to the file. */
file: string,
args: {
/**
* `"oldest"` is the commit that added the file, following renames;
* `"newest"` is the last commit that edited the file.
*/
age?: 'oldest' | 'newest';
/** Use `includeAuthor: true` to get the author information as well. */
includeAuthor?: false;
},
): {
/** Relevant commit date. */
date: Date;
/** Timestamp in **seconds**, as returned from git. */
timestamp: number;
};
/**
* Fetches the git history of a file and returns a relevant commit date.
* It gets the commit date instead of author date so that amended commits
* can have their dates updated.
*
* @throws {@link GitNotFoundError} If git is not found in `PATH`.
* @throws {@link FileNotTrackedError} If the current file is not tracked by git.
* @throws Also throws when `git log` exited with non-zero, or when it outputs
* unexpected text.
*/
export function getFileCommitDate(
/** Absolute path to the file. */
file: string,
args: {
/**
* `"oldest"` is the commit that added the file, following renames;
* `"newest"` is the last commit that edited the file.
*/
age?: 'oldest' | 'newest';
includeAuthor: true;
},
): {
/** Relevant commit date. */
date: Date;
/** Timestamp in **seconds**, as returned from git. */
timestamp: number;
/** The author's name, as returned from git. */
author: string;
};
export function getFileCommitDate(
file: string,
{
age = 'oldest',
includeAuthor = false,
}: {
age?: 'oldest' | 'newest';
includeAuthor?: boolean;
},
): {
date: Date;
timestamp: number;
author?: string;
} {
if (!shell.which('git')) {
throw new GitNotFoundError(
`Failed to retrieve git history for "${file}" because git is not installed.`,
);
}
if (!shell.test('-f', file)) {
throw new Error(
`Failed to retrieve git history for "${file}" because the file does not exist.`,
);
}
const args = [
`--format=%ct${includeAuthor ? ',%an' : ''}`,
'--max-count=1',
age === 'oldest' ? '--follow --diff-filter=A' : undefined,
]
.filter(Boolean)
.join(' ');
const result = shell.exec(`git log ${args} -- "${path.basename(file)}"`, {
// Setting cwd is important, see: https://github.com/facebook/docusaurus/pull/5048
cwd: path.dirname(file),
silent: true,
});
if (result.code !== 0) {
throw new Error(
`Failed to retrieve the git history for file "${file}" with exit code ${result.code}: ${result.stderr}`,
);
}
let regex = /^(?<timestamp>\d+)$/;
if (includeAuthor) {
regex = /^(?<timestamp>\d+),(?<author>.+)$/;
}
const output = result.stdout.trim();
if (!output) {
throw new FileNotTrackedError(
`Failed to retrieve the git history for file "${file}" because the file is not tracked by git.`,
);
}
const match = output.match(regex);
if (!match) {
throw new Error(
`Failed to retrieve the git history for file "${file}" with unexpected output: ${output}`,
);
}
const timestamp = Number(match.groups!.timestamp);
const date = new Date(timestamp * 1000);
if (includeAuthor) {
return {date, timestamp, author: match.groups!.author!};
}
return {date, timestamp};
}
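
A usage sketch for the overloaded `getFileCommitDate`; the fallback behavior shown here is an assumption about how a caller might handle the custom errors, not something this module prescribes:

```ts
import {
  getFileCommitDate,
  FileNotTrackedError,
  GitNotFoundError,
} from '@docusaurus/utils';

function tryGetLastUpdate(filePath: string) {
  try {
    // With `includeAuthor: true`, the overload adds `author` to the result.
    const {date, author} = getFileCommitDate(filePath, {
      age: 'newest',
      includeAuthor: true,
    });
    return {date, author};
  } catch (err) {
    // Degrade gracefully when git metadata is unavailable.
    if (err instanceof GitNotFoundError || err instanceof FileNotTrackedError) {
      return null;
    }
    throw err;
  }
}
```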

88
node_modules/@docusaurus/utils/src/globUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,88 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
// Globby/Micromatch are the 2 libs we use in Docusaurus consistently
import path from 'path';
import Micromatch from 'micromatch'; // Note: Micromatch is used by Globby
import {addSuffix} from './jsUtils';
/** A re-export of the globby instance. */
export {default as Globby} from 'globby';
/**
* The default glob patterns we ignore when sourcing content.
* - Ignore files and folders starting with `_` recursively
* - Ignore tests
*/
export const GlobExcludeDefault = [
'**/_*.{js,jsx,ts,tsx,md,mdx}',
'**/_*/**',
'**/*.test.{js,jsx,ts,tsx}',
'**/__tests__/**',
];
type Matcher = (str: string) => boolean;
/**
* A very thin wrapper around `Micromatch.makeRe`.
*
* @see {@link createAbsoluteFilePathMatcher}
* @param patterns A list of glob patterns. If the list is empty, it defaults to
* matching none.
* @returns A matcher handle that tells if a file path is matched by any of the
* patterns.
*/
export function createMatcher(patterns: string[]): Matcher {
if (patterns.length === 0) {
// `/(?:)/.test("foo")` is `true`
return () => false;
}
const regexp = new RegExp(
patterns.map((pattern) => Micromatch.makeRe(pattern).source).join('|'),
);
return (str) => regexp.test(str);
}
/**
* We use match patterns like `"** /_* /**"` (ignore the spaces), where `"_*"`
* should only be matched within a subfolder. This function would:
* - Match `/user/sebastien/website/docs/_partials/xyz.md`
* - Ignore `/user/_sebastien/website/docs/partials/xyz.md`
*
* @param patterns A list of glob patterns.
* @param rootFolders A list of root folders to resolve the glob from.
* @returns A matcher handle that tells if a file path is matched by any of the
* patterns, resolved from the first root folder that contains the path.
* @throws Throws when the returned matcher receives a path that doesn't belong
* to any of the `rootFolders`.
*/
export function createAbsoluteFilePathMatcher(
patterns: string[],
rootFolders: string[],
): Matcher {
const matcher = createMatcher(patterns);
function getRelativeFilePath(absoluteFilePath: string) {
const rootFolder = rootFolders.find((folderPath) =>
[addSuffix(folderPath, '/'), addSuffix(folderPath, '\\')].some((p) =>
absoluteFilePath.startsWith(p),
),
);
if (!rootFolder) {
throw new Error(
`createAbsoluteFilePathMatcher unexpected error, absoluteFilePath=${absoluteFilePath} was not contained in any of the root folders: ${rootFolders.join(
', ',
)}`,
);
}
return path.relative(rootFolder, absoluteFilePath);
}
return (absoluteFilePath: string) =>
matcher(getRelativeFilePath(absoluteFilePath));
}
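
A sketch of `createAbsoluteFilePathMatcher` combined with `GlobExcludeDefault`; the `/user/website/docs` root folder is an assumption for the example:

```ts
import {
  createAbsoluteFilePathMatcher,
  GlobExcludeDefault,
} from '@docusaurus/utils';

const docsDir = '/user/website/docs'; // hypothetical root folder
const isExcluded = createAbsoluteFilePathMatcher(GlobExcludeDefault, [docsDir]);

isExcluded('/user/website/docs/_partials/note.md'); // true: inside a "_" folder
isExcluded('/user/website/docs/intro.md'); // false: regular content file
// Throws: the path is not contained in any of the root folders.
// isExcluded('/elsewhere/intro.md');
```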

38
node_modules/@docusaurus/utils/src/hashUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,38 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {createHash} from 'crypto';
import _ from 'lodash';
import {shortName, isNameTooLong} from './pathUtils';
/** Thin wrapper around `crypto.createHash("md5")`. */
export function md5Hash(str: string): string {
return createHash('md5').update(str).digest('hex');
}
/** Creates an MD5 hash and truncates it to the given length. */
export function simpleHash(str: string, length: number): string {
return md5Hash(str).substring(0, length);
}
// Based on https://github.com/gatsbyjs/gatsby/pull/21518/files
/**
* Given an input string, convert to kebab-case and append a hash, avoiding name
* collision. Also removes part of the string if it's larger than the allowed
* filename length per OS, avoiding an `ERRNAMETOOLONG` error.
*/
export function docuHash(str: string): string {
if (str === '/') {
return 'index';
}
const shortHash = simpleHash(str, 3);
const parsedPath = `${_.kebabCase(str)}-${shortHash}`;
if (isNameTooLong(parsedPath)) {
return `${shortName(_.kebabCase(str))}-${shortHash}`;
}
return parsedPath;
}
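
Indicative examples of the hashing helpers (the exact hash suffixes depend on the MD5 digest of the input):

```ts
import {docuHash, simpleHash, md5Hash} from '@docusaurus/utils';

docuHash('/'); // "index" (special-cased)
docuHash('/docs/intro'); // "docs-intro-" plus a 3-char hash suffix
simpleHash('some content', 8); // first 8 hex chars of the MD5 digest
md5Hash('some content'); // full 32-char hex MD5 digest
```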

114
node_modules/@docusaurus/utils/src/i18nUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,114 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import path from 'path';
import _ from 'lodash';
import {DEFAULT_PLUGIN_ID} from './constants';
import {normalizeUrl} from './urlUtils';
import type {
TranslationFileContent,
TranslationFile,
I18n,
} from '@docusaurus/types';
/**
* Takes a list of translation file contents, and shallow-merges them into one.
*/
export function mergeTranslations(
contents: TranslationFileContent[],
): TranslationFileContent {
return contents.reduce((acc, content) => ({...acc, ...content}), {});
}
/**
* Useful to update all the messages of a translation file. Used in tests to
* simulate translations.
*/
export function updateTranslationFileMessages(
translationFile: TranslationFile,
updateMessage: (message: string) => string,
): TranslationFile {
return {
...translationFile,
content: _.mapValues(translationFile.content, (translation) => ({
...translation,
message: updateMessage(translation.message),
})),
};
}
/**
* Takes everything needed and constructs a plugin i18n path. Plugins should
* expect everything they need for translations to be found under this path.
*/
export function getPluginI18nPath({
localizationDir,
pluginName,
pluginId = DEFAULT_PLUGIN_ID,
subPaths = [],
}: {
localizationDir: string;
pluginName: string;
pluginId?: string | undefined;
subPaths?: string[];
}): string {
return path.join(
localizationDir,
// Make it convenient to use for single-instance plugins,
// i.e. return "docs", not "docs-default" nor "docs/default"
`${pluginName}${pluginId === DEFAULT_PLUGIN_ID ? '' : `-${pluginId}`}`,
...subPaths,
);
}
/**
* Takes a path and returns a localized version (which is basically `path +
* i18n.currentLocale`).
*
* This is used to resolve the `outDir` and `baseUrl` of each locale; it is NOT
* used to determine plugin localization file locations.
*/
export function localizePath({
pathType,
path: originalPath,
i18n,
options = {},
}: {
/**
* FS paths will treat Windows specially; URL paths will always have a
* trailing slash to make it a valid base URL.
*/
pathType: 'fs' | 'url';
/** The path, URL or file path, to be localized. */
path: string;
/** The current i18n context. */
i18n: I18n;
options?: {
/**
* By default, we don't localize the path of defaultLocale. This option
* would override that behavior. Setting `false` is useful for `yarn build
* -l zh-Hans` to always emit into the root build directory.
*/
localizePath?: boolean;
};
}): string {
const shouldLocalizePath: boolean =
options.localizePath ?? i18n.currentLocale !== i18n.defaultLocale;
if (!shouldLocalizePath) {
return originalPath;
}
// FS paths need special care, for Windows support. Note: we don't use the
// locale config's `path` here, because this function is used for resolving
// outDir, which must be the same as baseUrl. When we have the baseUrl config,
// we need to sync the two.
if (pathType === 'fs') {
return path.join(originalPath, i18n.currentLocale);
}
// Url paths; add a trailing slash so it's a valid base URL
return normalizeUrl([originalPath, i18n.currentLocale, '/']);
}
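
A minimal sketch of the i18n path helpers. The `i18n` object below is a simplified stand-in for `context.i18n` (hence the cast), and the plugin name and paths are assumptions:

```ts
import {getPluginI18nPath, localizePath} from '@docusaurus/utils';
import type {I18n} from '@docusaurus/types';

// Simplified stand-in; a real I18n object carries more fields.
const i18n = {
  defaultLocale: 'en',
  currentLocale: 'fr',
  locales: ['en', 'fr'],
} as unknown as I18n;

// "/site/i18n/fr/docusaurus-plugin-content-docs/current"
getPluginI18nPath({
  localizationDir: '/site/i18n/fr',
  pluginName: 'docusaurus-plugin-content-docs',
  subPaths: ['current'],
});

// "/fr/": the URL base is localized because currentLocale !== defaultLocale
localizePath({pathType: 'url', path: '/', i18n});
```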

119
node_modules/@docusaurus/utils/src/index.ts generated vendored Normal file
View File

@@ -0,0 +1,119 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
export {
NODE_MAJOR_VERSION,
NODE_MINOR_VERSION,
DOCUSAURUS_VERSION,
DEFAULT_BUILD_DIR_NAME,
DEFAULT_CONFIG_FILE_NAME,
BABEL_CONFIG_FILE_NAME,
GENERATED_FILES_DIR_NAME,
SRC_DIR_NAME,
DEFAULT_STATIC_DIR_NAME,
OUTPUT_STATIC_ASSETS_DIR_NAME,
THEME_PATH,
DEFAULT_I18N_DIR_NAME,
CODE_TRANSLATIONS_FILE_NAME,
DEFAULT_PORT,
DEFAULT_PLUGIN_ID,
WEBPACK_URL_LOADER_LIMIT,
} from './constants';
export {generate, readOutputHTMLFile} from './emitUtils';
export {
getFileCommitDate,
FileNotTrackedError,
GitNotFoundError,
} from './gitUtils';
export {
mergeTranslations,
updateTranslationFileMessages,
getPluginI18nPath,
localizePath,
} from './i18nUtils';
export {
removeSuffix,
removePrefix,
mapAsyncSequential,
findAsyncSequential,
} from './jsUtils';
export {
normalizeUrl,
getEditUrl,
fileToPath,
encodePath,
isValidPathname,
resolvePathname,
parseURLPath,
serializeURLPath,
addLeadingSlash,
addTrailingSlash,
removeTrailingSlash,
hasSSHProtocol,
buildHttpsUrl,
buildSshUrl,
} from './urlUtils';
export type {URLPath} from './urlUtils';
export {
type Tag,
type TagsListItem,
type TagModule,
type FrontMatterTag,
normalizeFrontMatterTags,
groupTaggedItems,
getTagVisibility,
} from './tags';
export {
parseMarkdownHeadingId,
escapeMarkdownHeadingIds,
unwrapMdxCodeBlocks,
admonitionTitleToDirectiveLabel,
createExcerpt,
DEFAULT_PARSE_FRONT_MATTER,
parseMarkdownContentTitle,
parseMarkdownFile,
writeMarkdownHeadingId,
type WriteHeadingIDOptions,
} from './markdownUtils';
export {
type ContentPaths,
type BrokenMarkdownLink,
replaceMarkdownLinks,
} from './markdownLinks';
export {type SluggerOptions, type Slugger, createSlugger} from './slugger';
export {
isNameTooLong,
shortName,
posixPath,
toMessageRelativeFilePath,
aliasedSitePath,
escapePath,
addTrailingPathSeparator,
} from './pathUtils';
export {md5Hash, simpleHash, docuHash} from './hashUtils';
export {
Globby,
GlobExcludeDefault,
createMatcher,
createAbsoluteFilePathMatcher,
} from './globUtils';
export {
getFileLoaderUtils,
getWebpackLoaderCompilerName,
type WebpackCompilerName,
} from './webpackUtils';
export {escapeShellArg} from './shellUtils';
export {loadFreshModule} from './moduleUtils';
export {
getDataFilePath,
getDataFileData,
getContentPathList,
findFolderContainingFile,
getFolderContainingFile,
} from './dataFileUtils';
export {isDraft, isUnlisted} from './contentVisibilityUtils';
export {escapeRegexp} from './regExpUtils';

69
node_modules/@docusaurus/utils/src/jsUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,69 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/** Adds a given string prefix to `str`. */
export function addPrefix(str: string, prefix: string): string {
return str.startsWith(prefix) ? str : `${prefix}${str}`;
}
/** Adds a given string suffix to `str`. */
export function addSuffix(str: string, suffix: string): string {
return str.endsWith(suffix) ? str : `${str}${suffix}`;
}
/** Removes a given string suffix from `str`. */
export function removeSuffix(str: string, suffix: string): string {
if (suffix === '') {
// str.slice(0, 0) is ""
return str;
}
return str.endsWith(suffix) ? str.slice(0, -suffix.length) : str;
}
/** Removes a given string prefix from `str`. */
export function removePrefix(str: string, prefix: string): string {
return str.startsWith(prefix) ? str.slice(prefix.length) : str;
}
/**
* `Array#map` for async operations where order matters.
* @param array The array to traverse.
* @param action An async action to be performed on every array item. Will be
* awaited before working on the next.
* @returns The list of results returned from every `action(item)`
*/
export async function mapAsyncSequential<T, R>(
array: T[],
action: (t: T) => Promise<R>,
): Promise<R[]> {
const results: R[] = [];
for (const t of array) {
const result = await action(t);
results.push(result);
}
return results;
}
/**
* `Array#find` for async operations where order matters.
* @param array The array to traverse.
* @param predicate An async predicate to be called on every array item. Should
* return a boolean indicating whether the current element should be returned.
* @returns The function immediately returns the first item on which `predicate`
* returns `true`, or `undefined` if none matches the predicate.
*/
export async function findAsyncSequential<T>(
array: T[],
predicate: (t: T) => Promise<boolean>,
): Promise<T | undefined> {
for (const t of array) {
if (await predicate(t)) {
return t;
}
}
return undefined;
}
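
A short sketch contrasting the sequential helpers with `Promise.all`-style parallelism; the `delay` helper is purely illustrative:

```ts
import {mapAsyncSequential, findAsyncSequential} from '@docusaurus/utils';

const delay = (ms: number) => new Promise<void>((res) => setTimeout(res, ms));

async function demo() {
  // Items are awaited one after the other, preserving order (no parallelism).
  const doubled = await mapAsyncSequential([1, 2, 3], async (n) => {
    await delay(10);
    return n * 2;
  }); // [2, 4, 6]

  // Resolves with the first item whose predicate resolves to true.
  const firstEven = await findAsyncSequential(
    [1, 3, 4, 5],
    async (n) => n % 2 === 0,
  ); // 4

  return {doubled, firstEven};
}
```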

198
node_modules/@docusaurus/utils/src/markdownLinks.ts generated vendored Normal file
View File

@@ -0,0 +1,198 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import path from 'path';
import {getContentPathList} from './dataFileUtils';
import {aliasedSitePath} from './pathUtils';
/**
* Content plugins have a base path and a localized path to source content from.
* We will look into the localized path in priority.
*/
export type ContentPaths = {
/**
* The absolute path to the base content directory, like `"<siteDir>/docs"`.
*/
contentPath: string;
/**
* The absolute path to the localized content directory, like
* `"<siteDir>/i18n/zh-Hans/plugin-content-docs"`.
*/
contentPathLocalized: string;
};
/** Data structure representing each broken Markdown link to be reported. */
export type BrokenMarkdownLink<T extends ContentPaths> = {
/** Absolute path to the file containing this link. */
filePath: string;
/**
* This is generic because it may contain extra metadata like version name,
* which the reporter can provide for context.
*/
contentPaths: T;
/**
* The content of the link, like `"./brokenFile.md"`
*/
link: string;
};
type CodeFence = {
type: '`' | '~';
definitelyOpen: boolean;
count: number;
};
function parseCodeFence(line: string): CodeFence | null {
const match = line.trim().match(/^(?<fence>`{3,}|~{3,})(?<rest>.*)/);
if (!match) {
return null;
}
return {
type: match.groups!.fence![0]! as '`' | '~',
definitelyOpen: !!match.groups!.rest!,
count: match.groups!.fence!.length,
};
}
/**
* Takes a Markdown file and replaces relative file references with their URL
* counterparts, e.g. `[link](./intro.md)` => `[link](/docs/intro)`, preserving
* everything else.
*
* This method uses best effort to find a matching file. The file reference can
* be relative to the directory of the current file (most likely) or any of the
* content paths (so `/tutorials/intro.md` can be resolved as
* `<siteDir>/docs/tutorials/intro.md`). Links that contain the `http(s):` or
* `@site/` prefix will always be ignored.
*/
export function replaceMarkdownLinks<T extends ContentPaths>({
siteDir,
fileString,
filePath,
contentPaths,
sourceToPermalink,
}: {
/** Absolute path to the site directory, used to resolve aliased paths. */
siteDir: string;
/** The Markdown file content to be processed. */
fileString: string;
/** Absolute path to the current file containing `fileString`. */
filePath: string;
/** The content paths which the file reference may live in. */
contentPaths: T;
/**
* A map from source paths to their URLs. Source paths are `@site` aliased.
*/
sourceToPermalink: {[aliasedPath: string]: string};
}): {
/**
* The content with all Markdown file references replaced with their URLs.
* Unresolved links are left as-is.
*/
newContent: string;
/** The list of broken links encountered. */
brokenMarkdownLinks: BrokenMarkdownLink<T>[];
} {
const brokenMarkdownLinks: BrokenMarkdownLink<T>[] = [];
// Replace internal markdown linking (except in fenced blocks).
let lastOpenCodeFence: CodeFence | null = null;
const lines = fileString.split('\n').map((line) => {
const codeFence = parseCodeFence(line);
if (codeFence) {
if (!lastOpenCodeFence) {
lastOpenCodeFence = codeFence;
} else if (
!codeFence.definitelyOpen &&
lastOpenCodeFence.type === codeFence.type &&
lastOpenCodeFence.count <= codeFence.count
) {
// All three conditions must be met in order for this to be considered
// a closing fence.
lastOpenCodeFence = null;
}
}
if (lastOpenCodeFence) {
return line;
}
let modifiedLine = line;
// Replace inline-style links or reference-style links e.g:
// This is [Document 1](doc1.md)
// [doc1]: doc1.md
const linkTitlePattern = '(?:\\s+(?:\'.*?\'|".*?"|\\(.*?\\)))?';
const linkSuffixPattern = '(?:\\?[^#>\\s]+)?(?:#[^>\\s]+)?';
const linkCapture = (forbidden: string) =>
`((?!https?://|@site/)[^${forbidden}#?]+)`;
const linkURLPattern = `(?:(?!<)${linkCapture(
'()\\s',
)}${linkSuffixPattern}|<${linkCapture('>')}${linkSuffixPattern}>)`;
const linkPattern = new RegExp(
`\\[(?:(?!\\]\\().)*\\]\\(\\s*${linkURLPattern}${linkTitlePattern}\\s*\\)|^\\s*\\[[^[\\]]*[^[\\]\\s][^[\\]]*\\]:\\s*${linkURLPattern}${linkTitlePattern}$`,
'dgm',
);
let mdMatch = linkPattern.exec(modifiedLine);
while (mdMatch !== null) {
// Replace it with the correct HTML link.
const mdLink = mdMatch.slice(1, 5).find(Boolean)!;
const mdLinkRange = mdMatch.indices!.slice(1, 5).find(Boolean)!;
if (!/\.mdx?$/.test(mdLink)) {
mdMatch = linkPattern.exec(modifiedLine);
continue;
}
const sourcesToTry: string[] = [];
// ./file.md and ../file.md are always relative to the current file
if (!mdLink.startsWith('./') && !mdLink.startsWith('../')) {
sourcesToTry.push(...getContentPathList(contentPaths), siteDir);
}
// /file.md is always relative to the content path
if (!mdLink.startsWith('/')) {
sourcesToTry.push(path.dirname(filePath));
}
const aliasedSourceMatch = sourcesToTry
.map((p) => path.join(p, decodeURIComponent(mdLink)))
.map((source) => aliasedSitePath(source, siteDir))
.find((source) => sourceToPermalink[source]);
const permalink: string | undefined = aliasedSourceMatch
? sourceToPermalink[aliasedSourceMatch]
: undefined;
if (permalink) {
// MDX won't be happy if the permalink contains a space, we need to
// convert it to %20
const encodedPermalink = permalink
.split('/')
.map((part) => part.replace(/\s/g, '%20'))
.join('/');
modifiedLine = `${modifiedLine.slice(
0,
mdLinkRange[0],
)}${encodedPermalink}${modifiedLine.slice(mdLinkRange[1])}`;
// Adjust the lastIndex to avoid passing over the next link if the
// newly replaced URL is shorter.
linkPattern.lastIndex += encodedPermalink.length - mdLink.length;
} else {
const brokenMarkdownLink: BrokenMarkdownLink<T> = {
contentPaths,
filePath,
link: mdLink,
};
brokenMarkdownLinks.push(brokenMarkdownLink);
}
mdMatch = linkPattern.exec(modifiedLine);
}
return modifiedLine;
});
const newContent = lines.join('\n');
return {newContent, brokenMarkdownLinks};
}
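
A sketch of `replaceMarkdownLinks` resolving one relative `.md` link to its permalink while reporting another as broken. All paths and the `sourceToPermalink` map are invented for the example:

```ts
import {replaceMarkdownLinks} from '@docusaurus/utils';

const {newContent, brokenMarkdownLinks} = replaceMarkdownLinks({
  siteDir: '/site',
  filePath: '/site/docs/guide.md',
  contentPaths: {
    contentPath: '/site/docs',
    contentPathLocalized: '/site/i18n/en/docusaurus-plugin-content-docs/current',
  },
  sourceToPermalink: {'@site/docs/intro.md': '/docs/intro'},
  fileString: 'See [the intro](./intro.md) and [missing](./nope.md).',
});

// newContent: "See [the intro](/docs/intro) and [missing](./nope.md)."
// brokenMarkdownLinks: one entry whose `link` is "./nope.md"
```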

462
node_modules/@docusaurus/utils/src/markdownUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,462 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import logger from '@docusaurus/logger';
import matter from 'gray-matter';
import {createSlugger, type Slugger, type SluggerOptions} from './slugger';
import type {
ParseFrontMatter,
DefaultParseFrontMatter,
} from '@docusaurus/types';
// Some utilities for parsing Markdown content. These things are only used on
// server-side when we infer metadata like `title` and `description` from the
// content. Most parsing is still done in MDX through the mdx-loader.
/**
* Parses custom ID from a heading. The ID can contain any characters except
* `{#` and `}`.
*
* @param heading e.g. `## Some heading {#some-heading}` where the last
* character must be `}` for the ID to be recognized
*/
export function parseMarkdownHeadingId(heading: string): {
/**
* The heading content sans the ID part, right-trimmed. e.g. `## Some heading`
*/
text: string;
/** The heading ID. e.g. `some-heading` */
id: string | undefined;
} {
const customHeadingIdRegex = /\s*\{#(?<id>(?:.(?!\{#|\}))*.)\}$/;
const matches = customHeadingIdRegex.exec(heading);
if (matches) {
return {
text: heading.replace(matches[0]!, ''),
id: matches.groups!.id!,
};
}
return {text: heading, id: undefined};
}
/**
* MDX 2 requires escaping { with a \, so our anchor syntax needs that now.
* See https://mdxjs.com/docs/troubleshooting-mdx/#could-not-parse-expression-with-acorn-error
*/
export function escapeMarkdownHeadingIds(content: string): string {
const markdownHeadingRegexp = /(?:^|\n)#{1,6}(?!#).*/g;
return content.replaceAll(markdownHeadingRegexp, (substring) =>
// TODO probably not the most efficient impl...
substring
.replace('{#', '\\{#')
// prevent duplicate escaping
.replace('\\\\{#', '\\{#'),
);
}
/**
* Hacky temporary escape hatch for Crowdin bad MDX support
* See https://docusaurus.io/docs/i18n/crowdin#mdx
*
* TODO Titus suggested a clean solution based on ```mdx eval and Remark
* See https://github.com/mdx-js/mdx/issues/701#issuecomment-947030041
*
* @param content
*/
export function unwrapMdxCodeBlocks(content: string): string {
// We only support 3/4 backticks on purpose, should be good enough
const regexp3 =
/(?<begin>^|\n)```(?<spaces>\x20*)mdx-code-block\n(?<children>.*?)\n```(?<end>\n|$)/gs;
const regexp4 =
/(?<begin>^|\n)````(?<spaces>\x20*)mdx-code-block\n(?<children>.*?)\n````(?<end>\n|$)/gs;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const replacer = (substring: string, ...args: any[]) => {
const groups = args.at(-1);
return `${groups.begin}${groups.children}${groups.end}`;
};
return content.replaceAll(regexp3, replacer).replaceAll(regexp4, replacer);
}
/**
* Add support for our legacy ":::note Title" admonition syntax
* Not supported by https://github.com/remarkjs/remark-directive
* Syntax is transformed to ":::note[Title]" (container directive label)
* See https://talk.commonmark.org/t/generic-directives-plugins-syntax/444
*
* @param content
* @param admonitionContainerDirectives
*/
export function admonitionTitleToDirectiveLabel(
content: string,
admonitionContainerDirectives: string[],
): string {
// This will also process ":::note Title" inside docs code blocks.
// Good enough: we fixed older versions' docs so they are not affected.
const directiveNameGroup = `(${admonitionContainerDirectives.join('|')})`;
const regexp = new RegExp(
`^(?<quote>(> ?)*)(?<indentation>( +|\t+))?(?<directive>:{3,}${directiveNameGroup}) +(?<title>.*)$`,
'gm',
);
return content.replaceAll(regexp, (substring, ...args: any[]) => {
const groups = args.at(-1);
return `${groups.quote ?? ''}${groups.indentation ?? ''}${
groups.directive
}[${groups.title}]`;
});
}
// TODO: Find a better way to do so, possibly by compiling the Markdown content,
// stripping out HTML tags and obtaining the first line.
/**
* Creates an excerpt of a Markdown file. This function will:
*
* - Ignore h1 headings (setext or atx)
* - Ignore import/export
* - Ignore code blocks
*
* And for the first contentful line, it will strip away most Markdown
* syntax, including HTML tags, emphasis, links (keeping the text), etc.
*/
export function createExcerpt(fileString: string): string | undefined {
const fileLines = fileString
.trimStart()
// Remove Markdown alternate title
.replace(/^[^\r\n]*\r?\n[=]+/g, '')
.split(/\r?\n/);
let inCode = false;
let inImport = false;
let lastCodeFence = '';
for (const fileLine of fileLines) {
// An empty line marks the end of imports
if (!fileLine.trim() && inImport) {
inImport = false;
}
// Skip empty line.
if (!fileLine.trim()) {
continue;
}
// Skip import/export declaration.
if ((/^(?:import|export)\s.*/.test(fileLine) || inImport) && !inCode) {
inImport = true;
continue;
}
// Skip code block line.
if (fileLine.trim().startsWith('```')) {
const codeFence = fileLine.trim().match(/^`+/)![0]!;
if (!inCode) {
inCode = true;
lastCodeFence = codeFence;
// If we are in a ````-fenced block, all ``` would be plain text instead
// of fences
} else if (codeFence.length >= lastCodeFence.length) {
inCode = false;
}
continue;
} else if (inCode) {
continue;
}
const cleanedLine = fileLine
// Remove HTML tags.
.replace(/<[^>]*>/g, '')
// Remove Title headers
.replace(/^#[^#]+#?/gm, '')
// Remove Markdown + ATX-style headers
.replace(/^#{1,6}\s*(?<text>[^#]*?)\s*#{0,6}/gm, '$1')
// Remove emphasis.
.replace(/(?<opening>[*_]{1,3})(?<text>.*?)\1/g, '$2')
// Remove strikethroughs.
.replace(/~~(?<text>\S.*\S)~~/g, '$1')
// Remove images.
.replace(/!\[(?<alt>.*?)\][[(].*?[\])]/g, '$1')
// Remove footnotes.
.replace(/\[\^.+?\](?:: .*$)?/g, '')
// Remove inline links.
.replace(/\[(?<alt>.*?)\][[(].*?[\])]/g, '$1')
// Remove inline code.
.replace(/`(?<text>.+?)`/g, '$1')
// Remove blockquotes.
.replace(/^\s{0,3}>\s?/g, '')
// Remove admonition definition.
.replace(/:::.*/, '')
// Remove Emoji names within colons include preceding whitespace.
.replace(/\s?:(?:::|[^:\n])+:/g, '')
// Remove custom Markdown heading id.
.replace(/\{#*[\w-]+\}/, '')
.trim();
if (cleanedLine) {
return cleanedLine;
}
}
return undefined;
}
/**
* Takes a raw Markdown file content, and parses the front matter using
* gray-matter. Worth noting that gray-matter accepts TOML and other markup
* languages as well.
*
* @throws Throws when gray-matter throws. e.g.:
* ```md
* ---
* foo: : bar
* ---
* ```
*/
export function parseFileContentFrontMatter(fileContent: string): {
/** Front matter as parsed by gray-matter. */
frontMatter: {[key: string]: unknown};
/** The remaining content, trimmed. */
content: string;
} {
// TODO Docusaurus v4: replace gray-matter by a better lib
// gray-matter is unmaintained, not flexible, and the code doesn't look good
const {data, content} = matter(fileContent);
// gray-matter has an undocumented front matter caching behavior
// https://github.com/jonschlinkert/gray-matter/blob/ce67a86dba419381db0dd01cc84e2d30a1d1e6a5/index.js#L39
// Unfortunately, this becomes a problem when we mutate returned front matter
// We want to make it possible as part of the parseFrontMatter API
// So we make it safe to mutate by always providing a deep copy
const frontMatter =
// And of course structuredClone() doesn't work well with Date in Jest...
// See https://github.com/jestjs/jest/issues/2549
// So we parse again for tests with a {} option object
// This undocumented empty option object disables gray-matter caching..
process.env.JEST_WORKER_ID
? matter(fileContent, {}).data
: structuredClone(data);
return {
frontMatter,
content: content.trim(),
};
}
export const DEFAULT_PARSE_FRONT_MATTER: DefaultParseFrontMatter = async (
params,
) => parseFileContentFrontMatter(params.fileContent);
function toTextContentTitle(contentTitle: string): string {
return contentTitle.replace(/`(?<text>[^`]*)`/g, '$<text>');
}
type ParseMarkdownContentTitleOptions = {
/**
* If `true`, the matching title will be removed from the returned content.
* We can promise that at least one empty line will be left between the
* content before and after, but you shouldn't make too many assumptions
* about what's left.
*/
removeContentTitle?: boolean;
};
/**
* Takes the raw Markdown content, without front matter, and tries to find an h1
* title (setext or atx) to be used as metadata.
*
* It only searches until the first contentful paragraph, ignoring import/export
* declarations.
*
* It will try to convert Markdown to reasonable text, but it isn't exhaustive,
* since it's only used as a fallback when `frontMatter.title` is not provided.
* For now, we just unwrap inline code (``# `config.js` `` => `config.js`).
*/
export function parseMarkdownContentTitle(
contentUntrimmed: string,
options?: ParseMarkdownContentTitleOptions,
): {
/** The content, optionally without the content title. */
content: string;
/** The title, trimmed and without the `#`. */
contentTitle: string | undefined;
} {
const removeContentTitleOption = options?.removeContentTitle ?? false;
const content = contentUntrimmed.trim();
// We only need to detect import statements that will be parsed by MDX as
// `import` nodes, as broken syntax can't render anyways. That means any block
// that has `import` at the very beginning and surrounded by empty lines.
const contentWithoutImport = content
.replace(/^(?:import\s(?:.|\r?\n(?!\r?\n))*(?:\r?\n){2,})*/, '')
.trim();
const regularTitleMatch = /^#[ \t]+(?<title>[^ \t].*)(?:\r?\n|$)/.exec(
contentWithoutImport,
);
const alternateTitleMatch = /^(?<title>.*)\r?\n=+(?:\r?\n|$)/.exec(
contentWithoutImport,
);
const titleMatch = regularTitleMatch ?? alternateTitleMatch;
if (!titleMatch) {
return {content, contentTitle: undefined};
}
const newContent = removeContentTitleOption
? content.replace(titleMatch[0]!, '')
: content;
if (regularTitleMatch) {
return {
content: newContent.trim(),
contentTitle: toTextContentTitle(
regularTitleMatch
.groups!.title!.trim()
.replace(/\s*(?:\{#*[\w-]+\}|#+)$/, ''),
).trim(),
};
}
return {
content: newContent.trim(),
contentTitle: toTextContentTitle(
alternateTitleMatch!.groups!.title!.trim().replace(/\s*=+$/, ''),
).trim(),
};
}
/**
* Makes a full-round parse.
*
* @throws Throws when `parseFrontMatter` throws, usually because of invalid
* syntax.
*/
export async function parseMarkdownFile({
filePath,
fileContent,
parseFrontMatter,
removeContentTitle,
}: {
filePath: string;
fileContent: string;
parseFrontMatter: ParseFrontMatter;
} & ParseMarkdownContentTitleOptions): Promise<{
/** @see {@link parseFrontMatter} */
frontMatter: {[key: string]: unknown};
/** @see {@link parseMarkdownContentTitle} */
contentTitle: string | undefined;
/** @see {@link createExcerpt} */
excerpt: string | undefined;
/**
* Content without front matter and (optionally) without title, depending on
* the `removeContentTitle` option.
*/
content: string;
}> {
try {
const {frontMatter, content: contentWithoutFrontMatter} =
await parseFrontMatter({
filePath,
fileContent,
defaultParseFrontMatter: DEFAULT_PARSE_FRONT_MATTER,
});
const {content, contentTitle} = parseMarkdownContentTitle(
contentWithoutFrontMatter,
{removeContentTitle},
);
const excerpt = createExcerpt(content);
return {
frontMatter,
content,
contentTitle,
excerpt,
};
} catch (err) {
logger.error(`Error while parsing Markdown front matter.
This can happen if you use special characters in front matter values (try using double quotes around that value).`);
throw err;
}
}
function unwrapMarkdownLinks(line: string): string {
return line.replace(
/\[(?<alt>[^\]]+)\]\([^)]+\)/g,
(match, p1: string) => p1,
);
}
function addHeadingId(
line: string,
slugger: Slugger,
maintainCase: boolean,
): string {
let headingLevel = 0;
while (line.charAt(headingLevel) === '#') {
headingLevel += 1;
}
const headingText = line.slice(headingLevel).trimEnd();
const headingHashes = line.slice(0, headingLevel);
const slug = slugger.slug(unwrapMarkdownLinks(headingText).trim(), {
maintainCase,
});
return `${headingHashes}${headingText} {#${slug}}`;
}
export type WriteHeadingIDOptions = SluggerOptions & {
/** Overwrite existing heading IDs. */
overwrite?: boolean;
};
/**
* Takes Markdown content, returns new content with heading IDs written.
* Respects existing IDs (unless `overwrite=true`) and never generates colliding
* IDs (through the slugger).
*/
export function writeMarkdownHeadingId(
content: string,
options: WriteHeadingIDOptions = {maintainCase: false, overwrite: false},
): string {
const {maintainCase = false, overwrite = false} = options;
const lines = content.split('\n');
const slugger = createSlugger();
// If we can't overwrite existing slugs, make sure other headings don't
// generate colliding slugs by first marking these slugs as occupied
if (!overwrite) {
lines.forEach((line) => {
const parsedHeading = parseMarkdownHeadingId(line);
if (parsedHeading.id) {
slugger.slug(parsedHeading.id);
}
});
}
let inCode = false;
return lines
.map((line) => {
if (line.startsWith('```')) {
inCode = !inCode;
return line;
}
// Ignore h1 headings, as we don't create anchor links for those
if (inCode || !line.startsWith('##')) {
return line;
}
const parsedHeading = parseMarkdownHeadingId(line);
// Do not process if id is already there
if (parsedHeading.id && !overwrite) {
return line;
}
return addHeadingId(parsedHeading.text, slugger, maintainCase);
})
.join('\n');
}
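
Indicative examples of the heading/excerpt helpers (outputs in the comments show the expected shape, with slugs generated by the internal slugger):

```ts
import {
  parseMarkdownHeadingId,
  writeMarkdownHeadingId,
  createExcerpt,
} from '@docusaurus/utils';

parseMarkdownHeadingId('## Some heading {#custom-id}');
// {text: '## Some heading', id: 'custom-id'}

writeMarkdownHeadingId('## Hello World\n\n## Hello World');
// '## Hello World {#hello-world}\n\n## Hello World {#hello-world-1}'

createExcerpt('# Title\n\nSome **bold** intro paragraph.\n\nMore text.');
// 'Some bold intro paragraph.'
```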

43
node_modules/@docusaurus/utils/src/moduleUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,43 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import jiti from 'jiti';
import logger from '@docusaurus/logger';
/*
jiti is able to load ESM, CJS, JSON, TS modules
*/
export async function loadFreshModule(modulePath: string): Promise<unknown> {
try {
if (typeof modulePath !== 'string') {
throw new Error(
logger.interpolate`Invalid module path of type name=${modulePath}`,
);
}
const load = jiti(__filename, {
// Transpilation cache, can be safely enabled
cache: true,
// Bypass Node.js runtime require cache
// Same as "import-fresh" package we used previously
requireCache: false,
// Only take into consideration the default export
// For now we don't need named exports
// This also helps normalize return value for both CJS/ESM/TS modules
interopDefault: true,
// debug: true,
});
return load(modulePath);
} catch (error) {
throw new Error(
logger.interpolate`Docusaurus could not load module at path path=${modulePath}\nCause: ${
(error as Error).message
}`,
{cause: error},
);
}
}
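
A sketch of `loadFreshModule`; the config path and the expected shape of the default export are assumptions for the example:

```ts
import {loadFreshModule} from '@docusaurus/utils';

async function readSiteConfig(siteConfigPath: string) {
  // jiti loads ESM/CJS/TS alike and bypasses the require cache, so repeated
  // calls pick up edits to the file without restarting the process.
  const config = await loadFreshModule(siteConfigPath);
  return config as {title?: string; url?: string};
}

// e.g. await readSiteConfig('/site/docusaurus.config.js');
```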

122
node_modules/@docusaurus/utils/src/pathUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,122 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import path from 'path';
// Based on https://github.com/gatsbyjs/gatsby/pull/21518/files
// macOS (APFS) and Windows (NTFS) filename length limit = 255 chars,
// Others = 255 bytes
const MAX_PATH_SEGMENT_CHARS = 255;
const MAX_PATH_SEGMENT_BYTES = 255;
// Space for appending things to the string like file extensions and so on
const SPACE_FOR_APPENDING = 10;
const isMacOs = () => process.platform === 'darwin';
const isWindows = () => process.platform === 'win32';
export const isNameTooLong = (str: string): boolean =>
// Not entirely correct: we can't assume FS from OS. But good enough?
isMacOs() || isWindows()
? // Windows (NTFS) and macOS (APFS) filename length limit (255 chars)
str.length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_CHARS
: // Other (255 bytes)
Buffer.from(str).length + SPACE_FOR_APPENDING > MAX_PATH_SEGMENT_BYTES;
export function shortName(str: string): string {
if (isMacOs() || isWindows()) {
const overflowingChars = str.length - MAX_PATH_SEGMENT_CHARS;
return str.slice(
0,
str.length - overflowingChars - SPACE_FOR_APPENDING - 1,
);
}
const strBuffer = Buffer.from(str);
const overflowingBytes =
Buffer.byteLength(strBuffer) - MAX_PATH_SEGMENT_BYTES;
return strBuffer
.slice(
0,
Buffer.byteLength(strBuffer) - overflowingBytes - SPACE_FOR_APPENDING - 1,
)
.toString();
}
/**
* Convert Windows backslash paths to posix style paths.
* E.g: endi\lie -> endi/lie
*
* Returns the original path if the posix counterpart is not a valid Windows path.
* This makes the legacy code that uses posixPath safe, but also makes it less
* useful when you actually want a path with forward slashes (e.g. for a URL).
*
* Adopted from https://github.com/sindresorhus/slash/blob/main/index.js
*/
export function posixPath(str: string): string {
const isExtendedLengthPath = str.startsWith('\\\\?\\');
if (isExtendedLengthPath) {
return str;
}
return str.replace(/\\/g, '/');
}
/**
* When you want to display a path in a message/warning/error, it's more
* convenient to:
*
* - make it relative to `cwd()`
* - convert to posix (ie not using windows \ path separator)
*
* This way, Jest tests can run more reliably on any computer/CI on both
* Unix/Windows
* For Windows users this is not perfect (as they see / instead of \) but it's
* probably good enough
*/
export function toMessageRelativeFilePath(filePath: string): string {
return posixPath(path.relative(process.cwd(), filePath));
}
/**
* Aliases a file path relative to the site directory, very useful so that we
* don't expose the user's site structure.
* Example: some/path/to/website/docs/foo.md -> @site/docs/foo.md
*/
export function aliasedSitePath(filePath: string, siteDir: string): string {
const relativePath = posixPath(path.relative(siteDir, filePath));
// Cannot use path.join() as it resolves '../' and removes
// the '@site'. Let webpack loader resolve it.
return `@site/${relativePath}`;
}
/**
* When you have a path like C:\X\Y
* It is not safe to use directly when generating code
* For example, this would fail due to unescaped \:
* `<img src={require("${filePath}")} />`
* But this would work: `<img src={require("${escapePath(filePath)}")} />`
*
* posixPath can't be used in all cases, because forward slashes are only valid
* Windows paths when they don't contain non-ascii characters, and posixPath
* doesn't escape those that fail to be converted.
*
* This function escapes double quotes but not single quotes (because it uses
* `JSON.stringify`). Therefore, you must put the escaped path inside double
* quotes when generating code.
*/
export function escapePath(str: string): string {
const escaped = JSON.stringify(str);
// Remove the " around the json string;
return escaped.substring(1, escaped.length - 1);
}
export function addTrailingPathSeparator(str: string): string {
return str.endsWith(path.sep)
? str
: // If this is Windows, we need to change the forward slash to backward
`${str.replace(/[\\/]$/, '')}${path.sep}`;
}
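
Indicative examples of the path helpers (Windows-style inputs are shown escaped as they would appear in source code):

```ts
import {posixPath, aliasedSitePath, escapePath} from '@docusaurus/utils';

posixPath('docs\\folder\\doc.md'); // 'docs/folder/doc.md'

aliasedSitePath('/site/docs/intro.md', '/site'); // '@site/docs/intro.md'

// Safe to interpolate inside double quotes in generated code:
const requireCall = `require("${escapePath('C:\\site\\static\\img')}")`;
// console.log(requireCall) prints: require("C:\\site\\static\\img")
```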

12
node_modules/@docusaurus/utils/src/regExpUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,12 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import escapeStringRegexp from 'escape-string-regexp';
export function escapeRegexp(string: string): string {
return escapeStringRegexp(string);
}

18
node_modules/@docusaurus/utils/src/shellUtils.ts generated vendored Normal file
View File

@@ -0,0 +1,18 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
// TODO move from shelljs to execa later?
// Execa is well maintained and widely used
// Even shelljs recommends execa for security / escaping:
// https://github.com/shelljs/shelljs/wiki/Security-guidelines
// Inspired by https://github.com/xxorax/node-shell-escape/blob/master/shell-escape.js
export function escapeShellArg(s: string): string {
let res = `'${s.replace(/'/g, "'\\''")}'`;
res = res.replace(/^(?:'')+/g, '').replace(/\\'''/g, "\\'");
return res;
}
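
A sketch of `escapeShellArg` quoting a user-supplied argument before it is interpolated into a shell command (the command itself is illustrative):

```ts
import {escapeShellArg} from '@docusaurus/utils';

const file = "my file's name.md";
const cmd = `git log ${escapeShellArg(file)}`;
// Printed: git log 'my file'\''s name.md'
```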

Some files were not shown because too many files have changed in this diff.